From 90c2c304b3c4bb4a7baecef6bdd138f66fead4ca Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Mon, 9 Nov 2015 22:49:37 +0100
Subject: [PATCH 001/322] Add javadocs to IndexingStats.Stats

This commit adds some basic javadocs to IndexingStats.Stats. A user on
the forum asked what getIndexingCurrent is, so I figured I could just
fix it in the code.
---
 .../index/indexing/IndexingStats.java         | 71 ++++++++-----------
 .../indices/stats/IndexStatsIT.java           |  6 +-
 2 files changed, 32 insertions(+), 45 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java b/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java
index 3df62994f96..07ca8af17e3 100644
--- a/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java
+++ b/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java
@@ -43,19 +43,14 @@ public class IndexingStats implements Streamable, ToXContent {
         private long indexTimeInMillis;
         private long indexCurrent;
         private long indexFailedCount;
-
         private long deleteCount;
         private long deleteTimeInMillis;
         private long deleteCurrent;
-
         private long noopUpdateCount;
-
         private long throttleTimeInMillis;
         private boolean isThrottled;
 
-        Stats() {
-
-        }
+        Stats() {}
 
         public Stats(long indexCount, long indexTimeInMillis, long indexCurrent, long indexFailedCount, long deleteCount, long deleteTimeInMillis, long deleteCurrent, long noopUpdateCount, boolean isThrottled, long throttleTimeInMillis) {
             this.indexCount = indexCount;
@@ -87,26 +82,29 @@ public class IndexingStats implements Streamable, ToXContent {
             }
         }
 
-        public long getIndexCount() {
-            return indexCount;
-        }
+        /**
+         * The total number of indexing operations.
+         */
+        public long getIndexCount() { return indexCount; }
 
-        public long getIndexFailedCount() {
-            return indexFailedCount;
-        }
+        /**
+         * The number of failed indexing operations.
+         */
+        public long getIndexFailedCount() { return indexFailedCount; }
 
-        public TimeValue getIndexTime() {
-            return new TimeValue(indexTimeInMillis);
-        }
+        /**
+         * The total amount of time spent on executing index operations.
+         */
+        public TimeValue getIndexTime() { return new TimeValue(indexTimeInMillis); }
 
-        public long getIndexTimeInMillis() {
-            return indexTimeInMillis;
-        }
-
-        public long getIndexCurrent() {
-            return indexCurrent;
-        }
+        /**
+         * Returns the currently in-flight indexing operations.
+         */
+        public long getIndexCurrent() { return indexCurrent; }
 
+        /**
+         * Returns the number of delete operations executed.
+         */
         public long getDeleteCount() {
             return deleteCount;
         }
@@ -114,32 +112,21 @@ public class IndexingStats implements Streamable, ToXContent {
         /**
          * Returns if the index is under merge throttling control
         */
-        public boolean isThrottled() {
-            return isThrottled;
-        }
-
-        /**
-         * Gets the amount of time in milliseconds that the index has been under merge throttling control
-         */
-        public long getThrottleTimeInMillis() {
-            return throttleTimeInMillis;
-        }
+        public boolean isThrottled() { return isThrottled; }
 
         /**
          * Gets the amount of time in a TimeValue that the index has been under merge throttling control
         */
-        public TimeValue getThrottleTime() {
-            return new TimeValue(throttleTimeInMillis);
-        }
+        public TimeValue getThrottleTime() { return new TimeValue(throttleTimeInMillis); }
 
-        public TimeValue getDeleteTime() {
-            return new TimeValue(deleteTimeInMillis);
-        }
-
-        public long getDeleteTimeInMillis() {
-            return deleteTimeInMillis;
-        }
+        /**
+         * The total amount of time spent on executing delete operations.
+ */ + public TimeValue getDeleteTime() { return new TimeValue(deleteTimeInMillis); } + /** + * Returns the currently in-flight delete operations + */ public long getDeleteCurrent() { return deleteCurrent; } diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index a87da6fc046..37ccd6057bf 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -301,7 +301,7 @@ public class IndexStatsIT extends ESIntegTestCase { //nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); stats = client().admin().indices().prepareStats().execute().actionGet(); - assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0l)); + assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l)); } public void testThrottleStats() throws Exception { @@ -339,7 +339,7 @@ public class IndexStatsIT extends ESIntegTestCase { refresh(); stats = client().admin().indices().prepareStats().execute().actionGet(); //nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); - done = stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis() > 0; + done = stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis() > 0; if (System.currentTimeMillis() - start > 300*1000) { //Wait 5 minutes for throttling to kick in fail("index throttling didn't kick in after 5 minutes of intense merging"); } @@ -374,7 +374,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexCount(), equalTo(3l)); assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0l)); assertThat(stats.getPrimaries().getIndexing().getTotal().isThrottled(), equalTo(false)); - assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0l)); + assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l)); assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites)); assertThat(stats.getTotal().getStore(), notNullValue()); assertThat(stats.getTotal().getMerge(), notNullValue()); From fbbb04b87e9819925bac9bc6772c794ec87499d7 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 27 Nov 2015 10:25:59 +0100 Subject: [PATCH 002/322] Add infrastructure to transactionally apply and reset dynamic settings This commit adds the infrastructure to make settings that are updateable resetable and changes the application of updates to be transactional. This means setting updates are either applied or not. If the application failes all values are rejected. This initial commit converts all dynamic cluster settings to make use of the new infrastructure. All cluster level dynamic settings are not resettable to their defaults or to the node level settings. The infrastructure also allows to list default values and descriptions which is not fully implemented yet. Values can be reset using a list of key or simple regular expressions. This has only been implemented on the java layer yet. For instance to reset all recovery settings to their defaults a user can just specify `indices.recovery.*`. This commit also adds strict settings validation, if a setting is unknown or if a setting can not be applied the entire settings update request will fail. 
--- .../ClusterUpdateSettingsRequest.java | 22 +- .../ClusterUpdateSettingsRequestBuilder.java | 22 + .../TransportClusterUpdateSettingsAction.java | 90 ++-- .../close/TransportCloseIndexAction.java | 28 +- .../delete/TransportDeleteIndexAction.java | 4 +- .../open/TransportOpenIndexAction.java | 6 +- .../action/support/DestructiveOperations.java | 26 +- .../client/transport/TransportClient.java | 4 - .../elasticsearch/cluster/ClusterModule.java | 107 ++--- .../cluster/InternalClusterInfoService.java | 54 +-- .../action/index/MappingUpdatedAction.java | 28 +- .../cluster/metadata/MetaData.java | 33 +- .../allocator/BalancedShardsAllocator.java | 59 +-- .../decider/AwarenessAllocationDecider.java | 57 +-- .../ClusterRebalanceAllocationDecider.java | 56 +-- .../ConcurrentRebalanceAllocationDecider.java | 27 +- .../decider/DiskThresholdDecider.java | 126 +++--- .../decider/EnableAllocationDecider.java | 53 +-- .../decider/FilterAllocationDecider.java | 62 +-- .../decider/ShardsLimitAllocationDecider.java | 30 +- .../SnapshotInProgressAllocationDecider.java | 29 +- .../decider/ThrottlingAllocationDecider.java | 49 +-- .../service/InternalClusterService.java | 37 +- .../settings/ClusterDynamicSettings.java | 38 -- .../common/settings/ClusterSettings.java | 170 ++++++++ .../settings/ClusterSettingsService.java | 53 +++ .../common/settings/Setting.java | 394 ++++++++++++++++++ .../common/settings/Settings.java | 9 + .../common/settings/SettingsService.java | 201 +++++++++ .../elasticsearch/common/unit/TimeValue.java | 24 ++ .../discovery/DiscoverySettings.java | 87 ++-- .../discovery/zen/ZenDiscovery.java | 43 +- .../zen/elect/ElectMasterService.java | 23 +- .../elasticsearch/gateway/GatewayService.java | 2 +- .../index/store/IndexStoreConfig.java | 39 +- .../elasticsearch/indices/IndicesService.java | 12 +- .../HierarchyCircuitBreakerService.java | 124 ++---- .../indices/recovery/RecoverySettings.java | 185 ++++---- .../indices/ttl/IndicesTTLService.java | 26 +- .../java/org/elasticsearch/node/Node.java | 9 +- .../org/elasticsearch/node/NodeModule.java | 8 +- .../node/settings/NodeSettingsService.java | 122 ------ .../settings/RestUpdateSettingsAction.java | 2 +- .../elasticsearch/search/SearchService.java | 23 +- .../snapshots/RestoreService.java | 33 +- .../elasticsearch/threadpool/ThreadPool.java | 77 +--- .../transport/TransportService.java | 44 +- .../admin/indices/upgrade/UpgradeIT.java | 8 +- .../master/IndexingMasterFailoverIT.java | 4 +- .../recovery/ReplicaRecoveryBenchmark.java | 2 +- .../BenchmarkNettyLargeMessages.java | 2 - .../OldIndexBackwardsCompatibilityIT.java | 2 +- .../bwcompat/RestoreBackwardsCompatIT.java | 2 +- .../cluster/ClusterInfoServiceIT.java | 11 +- .../cluster/ClusterModuleTests.java | 12 +- .../cluster/ClusterServiceIT.java | 4 +- .../cluster/MinimumMasterNodesIT.java | 24 +- .../elasticsearch/cluster/NoMasterNodeIT.java | 4 +- .../ack/AckClusterUpdateSettingsIT.java | 6 +- .../org/elasticsearch/cluster/ack/AckIT.java | 4 +- .../allocation/AwarenessAllocationIT.java | 6 +- .../cluster/allocation/ClusterRerouteIT.java | 16 +- .../allocation/AddIncrementallyTests.java | 6 +- .../allocation/AllocationCommandsTests.java | 8 +- .../allocation/AllocationPriorityTests.java | 4 +- .../allocation/AwarenessAllocationTests.java | 22 +- .../allocation/BalanceConfigurationTests.java | 53 ++- .../ClusterRebalanceRoutingTests.java | 14 +- .../allocation/DeadNodesAllocationTests.java | 6 +- .../allocation/FailedNodeRoutingTests.java | 2 +- 
.../allocation/FailedShardsRoutingTests.java | 12 +- .../routing/allocation/IndexBalanceTests.java | 6 +- .../NodeVersionAllocationDeciderTests.java | 6 +- .../allocation/RebalanceAfterActiveTests.java | 2 +- .../RoutingNodesIntegrityTests.java | 6 +- .../allocation/ShardVersioningTests.java | 2 +- .../ShardsLimitAllocationTests.java | 4 +- .../SingleShardNoReplicasRoutingTests.java | 4 +- .../TenShardsOneReplicaRoutingTests.java | 2 +- .../decider/DiskThresholdDeciderTests.java | 96 ++--- .../DiskThresholdDeciderUnitTests.java | 24 +- .../decider/EnableAllocationDeciderIT.java | 4 +- .../decider/EnableAllocationTests.java | 36 +- .../allocation/decider/MockDiskUsagesIT.java | 20 +- .../cluster/settings/ClusterSettingsIT.java | 146 ++++++- .../structure/RoutingIteratorTests.java | 4 +- .../breaker/MemoryCircuitBreakerTests.java | 7 +- .../common/settings/SettingTests.java | 282 +++++++++++++ .../common/settings/SettingsServiceTests.java | 125 ++++++ .../common/unit/TimeValueTests.java | 10 + .../common/util/BigArraysTests.java | 11 +- .../DiscoveryWithServiceDisruptionsIT.java | 12 +- .../zen/NodeJoinControllerTests.java | 5 +- .../discovery/zen/ZenDiscoveryIT.java | 2 +- .../PublishClusterStateActionTests.java | 17 +- .../gateway/GatewayMetaStateTests.java | 4 +- .../RecoveryBackwardsCompatibilityIT.java | 4 +- .../gateway/RecoveryFromGatewayIT.java | 4 +- .../index/TransportIndexFailuresIT.java | 2 +- .../index/store/CorruptedFileIT.java | 6 +- .../indices/IndicesLifecycleListenerIT.java | 4 +- .../memory/breaker/CircuitBreakerNoopIT.java | 4 +- .../breaker/CircuitBreakerServiceIT.java | 34 +- .../breaker/CircuitBreakerUnitTests.java | 5 +- .../indices/recovery/IndexRecoveryIT.java | 12 +- .../recovery/RecoverySourceHandlerTests.java | 5 +- .../state/CloseIndexDisableCloseAllIT.java | 4 +- .../indices/state/RareClusterStateIT.java | 4 +- .../store/IndicesStoreIntegrationIT.java | 10 +- .../DestructiveOperationsIntegrationIT.java | 8 +- .../recovery/RecoverySettingsTests.java | 24 +- .../elasticsearch/recovery/RelocationIT.java | 2 +- .../recovery/TruncatedRecoveryIT.java | 2 +- .../AbstractSnapshotIntegTestCase.java | 2 +- .../DedicatedClusterSnapshotRestoreIT.java | 28 +- .../ThreadPoolTypeSettingsValidatorTests.java | 73 ---- .../UpdateThreadPoolSettingsTests.java | 65 +-- .../AbstractSimpleTransportTestCase.java | 13 +- .../NettySizeHeaderFrameDecoderTests.java | 5 +- .../discovery/azure/AzureDiscovery.java | 6 +- .../discovery/ec2/Ec2Discovery.java | 6 +- .../discovery/gce/GceDiscovery.java | 6 +- .../MockInternalClusterInfoService.java | 12 +- .../search/MockSearchService.java | 10 +- .../test/ESAllocationTestCase.java | 19 +- .../test/ESBackcompatTestCase.java | 4 +- .../elasticsearch/test/ESIntegTestCase.java | 8 +- .../test/InternalTestCluster.java | 32 +- 128 files changed, 2527 insertions(+), 1724 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettings.java create mode 100644 core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java create mode 100644 core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java create mode 100644 core/src/main/java/org/elasticsearch/common/settings/Setting.java create mode 100644 core/src/main/java/org/elasticsearch/common/settings/SettingsService.java delete mode 100644 core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java create mode 100644 core/src/test/java/org/elasticsearch/common/settings/SettingTests.java create mode 100644 
core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java

diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java
index 0090d7db057..96e90e1afc3 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java
@@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
-import java.util.Map;
+import java.util.*;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
@@ -44,6 +44,8 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> {
+    private Set<String> transientReset = new HashSet<>();
+    private Set<String> persistentReset = new HashSet<>();
 
     public ClusterUpdateSettingsRequest() {
     }
@@ -51,7 +53,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> {
+    public Set<String> getTransientReset() { return Collections.unmodifiableSet(transientReset); }
+
+    public Set<String> getPersistentReset() { return Collections.unmodifiableSet(persistentReset); }
+
+    public void addTransientResetKeys(Collection<String> keys) {
+        transientReset.addAll(keys);
+    }
+
+    public void addPersistentResetKeys(Collection<String> keys) {
+        persistentReset.addAll(keys);
+    }
+
     /**
      * Sets the transient settings to be updated. They will not survive a full cluster restart
      */
@@ -148,6 +162,8 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> {
+        transientReset = new HashSet<>(Arrays.asList(in.readStringArray()));
+        persistentReset = new HashSet<>(Arrays.asList(in.readStringArray()));
         readTimeout(in);
     }
 
@@ -156,6 +172,8 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> {
+        out.writeStringArray(transientReset.toArray(new String[transientReset.size()]));
+        out.writeStringArray(persistentReset.toArray(new String[persistentReset.size()]));
         writeTimeout(out);
     }

diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java
+    public ClusterUpdateSettingsRequestBuilder addTransientResetKeys(Collection<String> keys) {
+        request.addTransientResetKeys(keys);
+        return this;
+    }
+
+    public ClusterUpdateSettingsRequestBuilder addPersistentResetKeys(Collection<String> keys) {
+        request.addPersistentResetKeys(keys);
+        return this;
+    }
+
+    public ClusterUpdateSettingsRequestBuilder addTransientResetKeys(String... keys) {
+        request.addTransientResetKeys(Arrays.asList(keys));
+        return this;
+    }
+
+    public ClusterUpdateSettingsRequestBuilder addPersistentResetKeys(String...
keys) { + request.addPersistentResetKeys(Arrays.asList(keys)); + return this; + } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 73d14a2bb11..90cc68abe91 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -34,16 +34,19 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.ClusterState.builder; @@ -54,15 +57,17 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct private final AllocationService allocationService; - private final DynamicSettings dynamicSettings; + private final ClusterSettings dynamicSettings; + private final ClusterSettingsService clusterSettingsService; @Inject public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - AllocationService allocationService, @ClusterDynamicSettings DynamicSettings dynamicSettings, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + AllocationService allocationService, ClusterSettings dynamicSettings, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettingsService clusterSettingsService) { super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, ClusterUpdateSettingsRequest::new); this.allocationService = allocationService; this.dynamicSettings = dynamicSettings; + this.clusterSettingsService = clusterSettingsService; } @Override @@ -73,8 +78,8 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) { // allow for dedicated changes to the metadata blocks, so we don't block those to allow to "re-enable" it - if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && request.persistentSettings().get(MetaData.SETTING_READ_ONLY) != null) || - request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && request.transientSettings().get(MetaData.SETTING_READ_ONLY) != null) { + if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() 
== 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.persistentSettings())) ||
+                request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.transientSettings())) {
             return null;
         }
         return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
@@ -184,37 +189,58 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {
                 Settings.Builder transientSettings = Settings.settingsBuilder();
                 transientSettings.put(currentState.metaData().transientSettings());
                 for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
-                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
-                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
-                        if (error == null) {
-                            transientSettings.put(entry.getKey(), entry.getValue());
-                            transientUpdates.put(entry.getKey(), entry.getValue());
-                            changed = true;
-                        } else {
-                            logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
-                        }
+                    if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) {
+                        transientSettings.put(entry.getKey(), entry.getValue());
+                        transientUpdates.put(entry.getKey(), entry.getValue());
+                        changed = true;
                     } else {
-                        logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
+                        throw new IllegalArgumentException("transient setting [" + entry.getKey() + "], not dynamically updateable");
                     }
                 }
 
                 Settings.Builder persistentSettings = Settings.settingsBuilder();
                 persistentSettings.put(currentState.metaData().persistentSettings());
                 for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
-                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
-                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
-                        if (error == null) {
-                            persistentSettings.put(entry.getKey(), entry.getValue());
-                            persistentUpdates.put(entry.getKey(), entry.getValue());
-                            changed = true;
-                        } else {
-                            logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
-                        }
+                    if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) {
+                        persistentSettings.put(entry.getKey(), entry.getValue());
+                        persistentUpdates.put(entry.getKey(), entry.getValue());
+                        changed = true;
                     } else {
-                        logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
+                        throw new IllegalArgumentException("persistent setting [" + entry.getKey() + "], not dynamically updateable");
                     }
                 }
 
+                for (String entry : request.getPersistentReset()) {
+                    Set<String> strings = persistentSettings.internalMap().keySet();
+                    Set<String> keysToRemove = new HashSet<>();
+                    for (String key : strings) {
+                        if (Regex.simpleMatch(entry, key)) {
+                            keysToRemove.add(key);
+                        }
+                    }
+                    for (String keyToRemove : keysToRemove) {
+                        persistentSettings.remove(keyToRemove);
+                        persistentUpdates.remove(keyToRemove);
+                    }
+                    changed |= keysToRemove.isEmpty() == false;
+                }
+
+                for (String entry : request.getTransientReset()) {
+                    Set<String> strings = transientSettings.internalMap().keySet();
+                    Set<String> keysToRemove = new HashSet<>();
+                    for (String key : strings) {
+                        if (Regex.simpleMatch(entry, key)) {
+                            keysToRemove.add(key);
+                        }
+                    }
+                    for (String keyToRemove : keysToRemove) {
+                        transientSettings.remove(keyToRemove);
+                        transientUpdates.remove(keyToRemove);
+                    }
+                    changed |= keysToRemove.isEmpty() == false;
+
} + + if (!changed) { return currentState; } @@ -224,14 +250,18 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct .transientSettings(transientSettings.build()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false); + boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) || MetaData.SETTING_READ_ONLY_SETTING.get(metaData.transientSettings()); if (updatedReadOnly) { blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); } else { blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); } - - return builder(currentState).metaData(metaData).blocks(blocks).build(); + ClusterState build = builder(currentState).metaData(metaData).blocks(blocks).build(); + Settings settings = build.metaData().settings(); + // now we try to apply things and if they are invalid we fail + // this dryRun will validate & parse settings but won't actually apply them. + clusterSettingsService.dryRun(settings); + return build; } }); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 2c25ee34f18..994fdcced1f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -31,31 +31,36 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; /** * Close index action */ -public class TransportCloseIndexAction extends TransportMasterNodeAction implements NodeSettingsService.Listener { +public class TransportCloseIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; private final DestructiveOperations destructiveOperations; private volatile boolean closeIndexEnabled; - public static final String SETTING_CLUSTER_INDICES_CLOSE_ENABLE = "cluster.indices.close.enable"; + public static final Setting CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.Cluster); @Inject public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, MetaDataIndexStateService indexStateService, - NodeSettingsService nodeSettingsService, ActionFilters actionFilters, + ClusterSettingsService clusterSettingsService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, DestructiveOperations destructiveOperations) { super(settings, CloseIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, CloseIndexRequest::new); this.indexStateService = indexStateService; 
this.destructiveOperations = destructiveOperations; - this.closeIndexEnabled = settings.getAsBoolean(SETTING_CLUSTER_INDICES_CLOSE_ENABLE, true); - nodeSettingsService.addListener(this); + this.closeIndexEnabled = CLUSTER_INDICES_CLOSE_ENABLE_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_INDICES_CLOSE_ENABLE_SETTING, this::setCloseIndexEnabled); + } + + private void setCloseIndexEnabled(boolean closeIndexEnabled) { + this.closeIndexEnabled = closeIndexEnabled; } @Override @@ -73,7 +78,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction listener) { destructiveOperations.failDestructive(request.indices()); if (closeIndexEnabled == false) { - throw new IllegalStateException("closing indices is disabled - set [" + SETTING_CLUSTER_INDICES_CLOSE_ENABLE + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace"); + throw new IllegalStateException("closing indices is disabled - set [" + CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey() + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace"); } super.doExecute(request, listener); } @@ -104,13 +109,4 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.Cluster); private volatile boolean destructiveRequiresName; @Inject - public DestructiveOperations(Settings settings, NodeSettingsService nodeSettingsService) { + public DestructiveOperations(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - destructiveRequiresName = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, false); - nodeSettingsService.addListener(this); + destructiveRequiresName = REQUIRES_NAME_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(REQUIRES_NAME_SETTING, this::setDestructiveRequiresName); + } + + private void setDestructiveRequiresName(boolean destructiveRequiresName) { + this.destructiveRequiresName = destructiveRequiresName; } /** @@ -65,15 +70,6 @@ public final class DestructiveOperations extends AbstractComponent implements No } } - @Override - public void onRefreshSettings(Settings settings) { - boolean newValue = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, destructiveRequiresName); - if (destructiveRequiresName != newValue) { - logger.info("updating [action.operate_all_indices] from [{}] to [{}]", destructiveRequiresName, newValue); - this.destructiveRequiresName = newValue; - } - } - private static boolean hasWildcardUsage(String aliasOrIndex) { return "_all".equals(aliasOrIndex) || aliasOrIndex.indexOf('*') != -1; } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 33cf3479419..eb0639917c1 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -32,7 +32,6 @@ import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Module; @@ -43,12 
+42,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index b2e793ba0ab..3739b4a3dc8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -19,9 +19,6 @@ package org.elasticsearch.cluster; -import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; @@ -29,7 +26,6 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateFilter; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService; @@ -60,17 +56,16 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettingsService; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; -import org.elasticsearch.discovery.DiscoverySettings; -import org.elasticsearch.discovery.zen.ZenDiscovery; -import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.engine.EngineConfig; @@ -81,21 +76,13 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import 
org.elasticsearch.index.store.IndexStore; -import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesWarmer; -import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.DefaultSearchContext; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; +import java.util.*; /** * Configures classes and services that affect the entire cluster. @@ -122,7 +109,7 @@ public class ClusterModule extends AbstractModule { SnapshotInProgressAllocationDecider.class)); private final Settings settings; - private final DynamicSettings.Builder clusterDynamicSettings = new DynamicSettings.Builder(); + private final Map> clusterDynamicSettings = new HashMap<>(); private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder(); private final ExtensionPoint.SelectedType shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class); private final ExtensionPoint.ClassSet allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class); @@ -145,73 +132,15 @@ public class ClusterModule extends AbstractModule { } private void registerBuiltinClusterSettings() { - registerClusterDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, Validator.EMPTY); - registerClusterDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, Validator.FLOAT); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, Validator.FLOAT); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_THRESHOLD, Validator.NON_NEGATIVE_FLOAT); - registerClusterDynamicSetting(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ALLOCATION_ALLOW_REBALANCE_VALIDATOR); - registerClusterDynamicSetting(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, Validator.INTEGER); - registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); - registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, Validator.EMPTY); - registerClusterDynamicSetting(ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, Validator.BOOLEAN); - registerClusterDynamicSetting(DiscoverySettings.NO_MASTER_BLOCK, Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE, Validator.EMPTY); - registerClusterDynamicSetting(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); - 
registerClusterDynamicSetting(IndicesTTLService.INDICES_TTL_INTERVAL, Validator.TIME); - registerClusterDynamicSetting(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, Validator.TIME); - registerClusterDynamicSetting(MetaData.SETTING_READ_ONLY, Validator.EMPTY); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, Validator.POSITIVE_BYTES_SIZE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS, Validator.INTEGER); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, Validator.BYTES_SIZE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_COMPRESS, Validator.EMPTY); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, Validator.POSITIVE_INTEGER); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, Validator.POSITIVE_INTEGER); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR); - registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER); - registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, Validator.EMPTY); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, Validator.EMPTY); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, Validator.BOOLEAN); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, Validator.BOOLEAN); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, Validator.EMPTY); - registerClusterDynamicSetting(DestructiveOperations.REQUIRES_NAME, Validator.EMPTY); - registerClusterDynamicSetting(DiscoverySettings.PUBLISH_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(DiscoverySettings.PUBLISH_DIFF_ENABLE, Validator.BOOLEAN); - registerClusterDynamicSetting(DiscoverySettings.COMMIT_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - 
registerClusterDynamicSetting(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, Validator.NON_NEGATIVE_DOUBLE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, Validator.NON_NEGATIVE_DOUBLE); - registerClusterDynamicSetting(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(SearchService.DEFAULT_SEARCH_TIMEOUT, Validator.TIMEOUT); - registerClusterDynamicSetting(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_VALIDATOR); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_INCLUDE, Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_INCLUDE + ".*", Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_EXCLUDE, Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_EXCLUDE + ".*", Validator.EMPTY); - registerClusterDynamicSetting(TransportCloseIndexAction.SETTING_CLUSTER_INDICES_CLOSE_ENABLE, Validator.BOOLEAN); - registerClusterDynamicSetting(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, Validator.INTEGER); - registerClusterDynamicSetting(TransportReplicationAction.SHARD_FAILURE_TIMEOUT, Validator.TIME_NON_NEGATIVE); + for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { + registerSetting(setting); + } } private void registerBuiltinIndexSettings() { registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY); - registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.EMPTY); + registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.NON_NEGATIVE_INTEGER); registerIndexDynamicSetting(MergeSchedulerConfig.MAX_MERGE_COUNT, Validator.EMPTY); registerIndexDynamicSetting(MergeSchedulerConfig.AUTO_THROTTLE, Validator.EMPTY); registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY); @@ -276,8 +205,17 @@ public class ClusterModule extends AbstractModule { indexDynamicSettings.addSetting(setting, validator); } - public void registerClusterDynamicSetting(String setting, Validator validator) { - clusterDynamicSettings.addSetting(setting, validator); + public void registerSetting(Setting setting) { + switch (setting.getScope()) { + case Cluster: + if (clusterDynamicSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + clusterDynamicSettings.put(setting.getKey(), setting); + break; + case Index: + throw new UnsupportedOperationException("not yet implemented"); + } } public void registerAllocationDecider(Class allocationDecider) { @@ -294,7 +232,6 @@ public class ClusterModule extends AbstractModule { @Override protected void configure() { - bind(DynamicSettings.class).annotatedWith(ClusterDynamicSettings.class).toInstance(clusterDynamicSettings.build()); bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings.build()); // bind ShardsAllocator @@ 
-325,5 +262,9 @@ public class ClusterModule extends AbstractModule { bind(NodeIndexDeletedAction.class).asEagerSingleton(); bind(NodeMappingRefreshAction.class).asEagerSingleton(); bind(MappingUpdatedAction.class).asEagerSingleton(); + final ClusterSettingsService clusterSettingsService = new ClusterSettingsService(settings, new ClusterSettings(new HashSet<>(clusterDynamicSettings.values()))); + bind(ClusterSettingsService.class).toInstance(clusterSettingsService); + + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 039868d16c4..13fe2472bd4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -37,11 +37,12 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ReceiveTimeoutTransportException; @@ -63,8 +64,8 @@ import java.util.concurrent.TimeUnit; */ public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, LocalNodeMasterListener, ClusterStateListener { - public static final String INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL = "cluster.info.update.interval"; - public static final String INTERNAL_CLUSTER_INFO_TIMEOUT = "cluster.info.update.timeout"; + public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, Setting.Scope.Cluster); + public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, Setting.Scope.Cluster); private volatile TimeValue updateFrequency; @@ -82,7 +83,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu private final List listeners = new CopyOnWriteArrayList<>(); @Inject - public InternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService, + public InternalClusterInfoService(Settings settings, ClusterSettingsService clusterSettingsService, TransportNodesStatsAction transportNodesStatsAction, TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { @@ -95,10 +96,12 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu this.transportIndicesStatsAction = transportIndicesStatsAction; this.clusterService = clusterService; this.threadPool = threadPool; - this.updateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, TimeValue.timeValueSeconds(30)); - this.fetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, TimeValue.timeValueSeconds(15)); - this.enabled = 
settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true); - nodeSettingsService.addListener(new ApplySettings()); + this.updateFrequency = INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.get(settings); + this.fetchTimeout = INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.get(settings); + this.enabled = DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, this::setFetchTimeout); + clusterSettingsService.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, this::setUpdateFrequency); + clusterSettingsService.addSettingsUpdateConsumer(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); // Add InternalClusterInfoService to listen for Master changes this.clusterService.add((LocalNodeMasterListener)this); @@ -106,35 +109,16 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu this.clusterService.add((ClusterStateListener)this); } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - TimeValue newUpdateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, null); - // ClusterInfoService is only enabled if the DiskThresholdDecider is enabled - Boolean newEnabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null); + private void setEnabled(boolean enabled) { + this.enabled = enabled; + } - if (newUpdateFrequency != null) { - if (newUpdateFrequency.getMillis() < TimeValue.timeValueSeconds(10).getMillis()) { - logger.warn("[{}] set too low [{}] (< 10s)", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, newUpdateFrequency); - throw new IllegalStateException("Unable to set " + INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL + " less than 10 seconds"); - } else { - logger.info("updating [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, updateFrequency, newUpdateFrequency); - InternalClusterInfoService.this.updateFrequency = newUpdateFrequency; - } - } + private void setFetchTimeout(TimeValue fetchTimeout) { + this.fetchTimeout = fetchTimeout; + } - TimeValue newFetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, null); - if (newFetchTimeout != null) { - logger.info("updating fetch timeout [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_TIMEOUT, fetchTimeout, newFetchTimeout); - InternalClusterInfoService.this.fetchTimeout = newFetchTimeout; - } - - - // We don't log about enabling it here, because the DiskThresholdDecider will already be logging about enable/disable - if (newEnabled != null) { - InternalClusterInfoService.this.enabled = newEnabled; - } - } + void setUpdateFrequency(TimeValue updateFrequency) { + this.updateFrequency = updateFrequency; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index e3925aa6f4e..04fea06138e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -26,11 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.concurrent.TimeoutException; @@ -40,30 +41,23 @@ import java.util.concurrent.TimeoutException; */ public class MappingUpdatedAction extends AbstractComponent { - public static final String INDICES_MAPPING_DYNAMIC_TIMEOUT = "indices.mapping.dynamic_timeout"; + public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); private IndicesAdminClient client; private volatile TimeValue dynamicMappingUpdateTimeout; - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - TimeValue current = MappingUpdatedAction.this.dynamicMappingUpdateTimeout; - TimeValue newValue = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, current); - if (!current.equals(newValue)) { - logger.info("updating " + INDICES_MAPPING_DYNAMIC_TIMEOUT + " from [{}] to [{}]", current, newValue); - MappingUpdatedAction.this.dynamicMappingUpdateTimeout = newValue; - } - } + @Inject + public MappingUpdatedAction(Settings settings, ClusterSettingsService clusterSettingsService) { + super(settings); + this.dynamicMappingUpdateTimeout = INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout); } - @Inject - public MappingUpdatedAction(Settings settings, NodeSettingsService nodeSettingsService) { - super(settings); - this.dynamicMappingUpdateTimeout = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, TimeValue.timeValueSeconds(30)); - nodeSettingsService.addListener(new ApplySettings()); + private void setDynamicMappingUpdateTimeout(TimeValue dynamicMappingUpdateTimeout) { + this.dynamicMappingUpdateTimeout = dynamicMappingUpdateTimeout; } + public void setClient(Client client) { this.client = client.admin().indices(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 68fa6e45d88..5b8514c22bc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -140,7 +141,7 @@ public class MetaData implements Iterable, Diffable, Fr } - public static final String SETTING_READ_ONLY = "cluster.blocks.read_only"; + public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.Cluster); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, 
RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); @@ -745,25 +746,25 @@ public class MetaData implements Iterable, Diffable, Fr /** All known byte-sized cluster settings. */ public static final Set CLUSTER_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet( - IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, - RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, - RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, - RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC)); + IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey())); /** All known time cluster settings. */ public static final Set CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet( - IndicesTTLService.INDICES_TTL_INTERVAL, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, - RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, - RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, - RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, - DiscoverySettings.PUBLISH_TIMEOUT, - InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD)); + IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(), + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), + DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), + InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey())); /** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't * specify a unit. 
*/ diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index b9ce532a611..e99e5c84ecb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -34,12 +34,14 @@ import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.ArrayList; import java.util.Collection; @@ -72,42 +74,47 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; */ public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { - public static final String SETTING_THRESHOLD = "cluster.routing.allocation.balance.threshold"; - public static final String SETTING_INDEX_BALANCE_FACTOR = "cluster.routing.allocation.balance.index"; - public static final String SETTING_SHARD_BALANCE_FACTOR = "cluster.routing.allocation.balance.shard"; - private static final float DEFAULT_INDEX_BALANCE_FACTOR = 0.55f; - private static final float DEFAULT_SHARD_BALANCE_FACTOR = 0.45f; + public static final float DEFAULT_INDEX_BALANCE_FACTOR = 0.55f; + public static final float DEFAULT_SHARD_BALANCE_FACTOR = 0.45f; + public static final float DEFAULT_THRESHOLD = 1.0f; - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - final float indexBalance = settings.getAsFloat(SETTING_INDEX_BALANCE_FACTOR, weightFunction.indexBalance); - final float shardBalance = settings.getAsFloat(SETTING_SHARD_BALANCE_FACTOR, weightFunction.shardBalance); - float threshold = settings.getAsFloat(SETTING_THRESHOLD, BalancedShardsAllocator.this.threshold); - if (threshold <= 0.0f) { - throw new IllegalArgumentException("threshold must be greater than 0.0f but was: " + threshold); - } - BalancedShardsAllocator.this.threshold = threshold; - BalancedShardsAllocator.this.weightFunction = new WeightFunction(indexBalance, shardBalance); - } - } + public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", DEFAULT_INDEX_BALANCE_FACTOR, true, Setting.Scope.Cluster); + public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", DEFAULT_SHARD_BALANCE_FACTOR, true, Setting.Scope.Cluster); + public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", DEFAULT_THRESHOLD, true, Setting.Scope.Cluster); private volatile WeightFunction weightFunction = new WeightFunction(DEFAULT_INDEX_BALANCE_FACTOR, 
DEFAULT_SHARD_BALANCE_FACTOR); - private volatile float threshold = 1.0f; - + private volatile float threshold = DEFAULT_THRESHOLD; public BalancedShardsAllocator(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); } @Inject - public BalancedShardsAllocator(Settings settings, NodeSettingsService nodeSettingsService) { + public BalancedShardsAllocator(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - ApplySettings applySettings = new ApplySettings(); - applySettings.onRefreshSettings(settings); - nodeSettingsService.addListener(applySettings); + setIndexBalance(INDEX_BALANCE_FACTOR_SETTING.get(settings)); + setShardBalance(SHARD_BALANCE_FACTOR_SETTING.get(settings)); + setThreshold(THRESHOLD_SETTING.get(settings)); + clusterSettingsService.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, this::setIndexBalance); + clusterSettingsService.addSettingsUpdateConsumer(SHARD_BALANCE_FACTOR_SETTING, this::setShardBalance); + clusterSettingsService.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); + } + + public void setIndexBalance(float indexBalance) { + weightFunction = new WeightFunction(indexBalance, weightFunction.shardBalance); + } + + public void setShardBalance(float shardBalanceFactor) { + weightFunction = new WeightFunction(weightFunction.indexBalance, shardBalanceFactor); + } + + public void setThreshold(float threshold) { + if (threshold <= 0.0f) { + throw new IllegalArgumentException("threshold must be greater than 0.0f but was: " + threshold); + } + this.threshold = threshold; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 6f7bbac8aea..450b953fd38 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -24,10 +24,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.HashMap; import java.util.Map; @@ -76,37 +78,12 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; - public static final String CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES = "cluster.routing.allocation.awareness.attributes"; - public static final String CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP = "cluster.routing.allocation.awareness.force."; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - String[] awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, null); - if (awarenessAttributes == null && 
"".equals(settings.get(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, null))) { - awarenessAttributes = Strings.EMPTY_ARRAY; // the empty string resets this - } - if (awarenessAttributes != null) { - logger.info("updating [cluster.routing.allocation.awareness.attributes] from [{}] to [{}]", AwarenessAllocationDecider.this.awarenessAttributes, awarenessAttributes); - AwarenessAllocationDecider.this.awarenessAttributes = awarenessAttributes; - } - Map forcedAwarenessAttributes = new HashMap<>(AwarenessAllocationDecider.this.forcedAwarenessAttributes); - Map forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP); - if (!forceGroups.isEmpty()) { - for (Map.Entry entry : forceGroups.entrySet()) { - String[] aValues = entry.getValue().getAsArray("values"); - if (aValues.length > 0) { - forcedAwarenessAttributes.put(entry.getKey(), aValues); - } - } - } - AwarenessAllocationDecider.this.forcedAwarenessAttributes = forcedAwarenessAttributes; - } - } + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "_na_", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.Cluster); private String[] awarenessAttributes; - private Map forcedAwarenessAttributes; + private volatile Map forcedAwarenessAttributes; /** * Creates a new {@link AwarenessAllocationDecider} instance @@ -121,24 +98,28 @@ public class AwarenessAllocationDecider extends AllocationDecider { * @param settings {@link Settings} to use */ public AwarenessAllocationDecider(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); } @Inject - public AwarenessAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public AwarenessAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - this.awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES); + this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes); + setForcedAwarenessAttributes(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.get(settings)); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, this::setForcedAwarenessAttributes); + } - forcedAwarenessAttributes = new HashMap<>(); - Map forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP); + private void setForcedAwarenessAttributes(Settings forceSettings) { + Map forcedAwarenessAttributes = new HashMap<>(); + Map forceGroups = forceSettings.getAsGroups(); for (Map.Entry entry : forceGroups.entrySet()) { String[] aValues = entry.getValue().getAsArray("values"); if (aValues.length > 0) { forcedAwarenessAttributes.put(entry.getKey(), aValues); } } - - nodeSettingsService.addListener(new ApplySettings()); + this.forcedAwarenessAttributes = forcedAwarenessAttributes; } /** @@ -150,6 +131,10 @@ public class AwarenessAllocationDecider extends AllocationDecider { return this.awarenessAttributes; } + private void 
setAwarenessAttributes(String[] awarenessAttributes) { + this.awarenessAttributes = awarenessAttributes; + } + @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return underCapacity(shardRouting, node, allocation, true); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 7638c7aeee8..f20489a795e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -19,13 +19,12 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Locale; @@ -49,19 +48,10 @@ import java.util.Locale; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; - - public static final String CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE = "cluster.routing.allocation.allow_rebalance"; - public static final Validator ALLOCATION_ALLOW_REBALANCE_VALIDATOR = (setting, value, clusterState) -> { - try { - ClusterRebalanceType.parseString(value); - return null; - } catch (IllegalArgumentException e) { - return "the value of " + setting + " must be one of: [always, indices_primaries_active, indices_all_active]"; - } - }; + public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", "_na_", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.Cluster); /** - * An enum representation for the configured re-balance type. + * An enum representation for the configured re-balance type. */ public static enum ClusterRebalanceType { /** @@ -73,7 +63,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { */ INDICES_PRIMARIES_ACTIVE, /** - * Re-balancing is allowed only once all shards on all indices are active. + * Re-balancing is allowed only once all shards on all indices are active. 
*/ INDICES_ALL_ACTIVE; @@ -85,48 +75,28 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { } else if ("indices_all_active".equalsIgnoreCase(typeString) || "indicesAllActive".equalsIgnoreCase(typeString)) { return ClusterRebalanceType.INDICES_ALL_ACTIVE; } - throw new IllegalArgumentException("Illegal value for " + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE + ": " + typeString); + throw new IllegalArgumentException("Illegal value for " + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING + ": " + typeString); } } private ClusterRebalanceType type; @Inject - public ClusterRebalanceAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - String allowRebalance = settings.get(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "indices_all_active"); try { - type = ClusterRebalanceType.parseString(allowRebalance); + type = CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.get(settings); } catch (IllegalStateException e) { - logger.warn("[{}] has a wrong value {}, defaulting to 'indices_all_active'", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, allowRebalance); + logger.warn("[{}] has a wrong value {}, defaulting to 'indices_all_active'", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getRaw(settings)); type = ClusterRebalanceType.INDICES_ALL_ACTIVE; } - logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type.toString().toLowerCase(Locale.ROOT)); + logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, type.toString().toLowerCase(Locale.ROOT)); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType); } - class ApplySettings implements NodeSettingsService.Listener { - - @Override - public void onRefreshSettings(Settings settings) { - String newAllowRebalance = settings.get(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, null); - if (newAllowRebalance != null) { - ClusterRebalanceType newType = null; - try { - newType = ClusterRebalanceType.parseString(newAllowRebalance); - } catch (IllegalArgumentException e) { - // ignore - } - - if (newType != null && newType != ClusterRebalanceAllocationDecider.this.type) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, - ClusterRebalanceAllocationDecider.this.type.toString().toLowerCase(Locale.ROOT), - newType.toString().toLowerCase(Locale.ROOT)); - ClusterRebalanceAllocationDecider.this.type = newType; - } - } - } + public void setType(ClusterRebalanceType type) { + this.type = type; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 6bd1b437acf..040c4481951 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -22,8 +22,9 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import 
org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; /** * Similar to the {@link ClusterRebalanceAllocationDecider} this @@ -41,27 +42,19 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; - public static final String CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE = "cluster.routing.allocation.cluster_concurrent_rebalance"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int clusterConcurrentRebalance = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance); - if (clusterConcurrentRebalance != ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance) { - logger.info("updating [cluster.routing.allocation.cluster_concurrent_rebalance] from [{}], to [{}]", ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance, clusterConcurrentRebalance); - ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance = clusterConcurrentRebalance; - } - } - } - + public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, true, Setting.Scope.Cluster); private volatile int clusterConcurrentRebalance; @Inject - public ConcurrentRebalanceAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ConcurrentRebalanceAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - this.clusterConcurrentRebalance = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 2); + this.clusterConcurrentRebalance = CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.get(settings); logger.debug("using [cluster_concurrent_rebalance] with [{}]", clusterConcurrentRebalance); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, this::setClusterConcurrentRebalance); + } + + public void setClusterConcurrentRebalance(int concurrentRebalance) { + clusterConcurrentRebalance = concurrentRebalance; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index a02c72c5745..40a8442ca3a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -33,15 +33,17 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Set; @@ -80,53 +82,11 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile boolean enabled; private volatile TimeValue rerouteInterval; - public static final String CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED = "cluster.routing.allocation.disk.threshold_enabled"; - public static final String CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK = "cluster.routing.allocation.disk.watermark.low"; - public static final String CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK = "cluster.routing.allocation.disk.watermark.high"; - public static final String CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS = "cluster.routing.allocation.disk.include_relocations"; - public static final String CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL = "cluster.routing.allocation.disk.reroute_interval"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - String newLowWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, null); - String newHighWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, null); - Boolean newRelocationsSetting = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, null); - Boolean newEnableSetting = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null); - TimeValue newRerouteInterval = settings.getAsTime(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, null); - - if (newEnableSetting != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, - DiskThresholdDecider.this.enabled, newEnableSetting); - DiskThresholdDecider.this.enabled = newEnableSetting; - } - if (newRelocationsSetting != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, - DiskThresholdDecider.this.includeRelocations, newRelocationsSetting); - DiskThresholdDecider.this.includeRelocations = newRelocationsSetting; - } - if (newLowWatermark != null) { - if (!validWatermarkSetting(newLowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse low watermark [{}]", newLowWatermark); - } - logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, newLowWatermark); - DiskThresholdDecider.this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(newLowWatermark); - DiskThresholdDecider.this.freeBytesThresholdLow = thresholdBytesFromWatermark(newLowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK); - } - if (newHighWatermark != null) { - if (!validWatermarkSetting(newHighWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse high watermark [{}]", newHighWatermark); - } - logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, newHighWatermark); - DiskThresholdDecider.this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(newHighWatermark); - DiskThresholdDecider.this.freeBytesThresholdHigh = thresholdBytesFromWatermark(newHighWatermark, 
CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK);
-            }
-            if (newRerouteInterval != null) {
-                logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, newRerouteInterval);
-                DiskThresholdDecider.this.rerouteInterval = newRerouteInterval;
-            }
-        }
-    }
+    public static final Setting<Boolean> CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.Cluster);
+    public static final Setting<String> CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "_na_", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster);
+    public static final Setting<String> CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "_na_", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.Cluster);
+    public static final Setting<Boolean> CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.Cluster);
+    public static final Setting<TimeValue> CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster);

     /**
      * Listens for a node to go over the high watermark and kicks off an empty
@@ -231,38 +191,49 @@ public class DiskThresholdDecider extends AllocationDecider {
         // It's okay the Client is null here, because the empty cluster info
         // service will never actually call the listener where the client is
         // needed. Also this constructor is only used for tests
-        this(settings, new NodeSettingsService(settings), EmptyClusterInfoService.INSTANCE, null);
+        this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), EmptyClusterInfoService.INSTANCE, null);
     }

     @Inject
-    public DiskThresholdDecider(Settings settings, NodeSettingsService nodeSettingsService, ClusterInfoService infoService, Client client) {
+    public DiskThresholdDecider(Settings settings, ClusterSettingsService clusterSettingsService, ClusterInfoService infoService, Client client) {
         super(settings);
-        String lowWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "85%");
-        String highWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "90%");
-
-        if (!validWatermarkSetting(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) {
-            throw new ElasticsearchParseException("unable to parse low watermark [{}]", lowWatermark);
-        }
-        if (!validWatermarkSetting(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) {
-            throw new ElasticsearchParseException("unable to parse high watermark [{}]", highWatermark);
-        }
-        // Watermark is expressed in terms of used data, but we need "free" data watermark
-        this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark);
-        this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark);
-
-        this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK);
-        this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK);
-        this.includeRelocations = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true);
-        this.rerouteInterval = settings.getAsTime(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, TimeValue.timeValueSeconds(60));
-
-        this.enabled = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true);
-        nodeSettingsService.addListener(new ApplySettings());
+        final String lowWatermark = CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.get(settings);
+        final String highWatermark = CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.get(settings);
+        setHighWatermark(highWatermark);
+        setLowWatermark(lowWatermark);
+        this.includeRelocations = CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.get(settings);
+        this.rerouteInterval = CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.get(settings);
+        this.enabled = CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, this::setLowWatermark);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, this::setHighWatermark);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, this::setIncludeRelocations);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, this::setRerouteInterval);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled);
         infoService.addListener(new DiskListener(client));
     }

-    // For Testing
-    ApplySettings newApplySettings() {
-        return new ApplySettings();
+    private void setIncludeRelocations(boolean includeRelocations) {
+        this.includeRelocations = includeRelocations;
+    }
+
+    private void setRerouteInterval(TimeValue rerouteInterval) {
+        this.rerouteInterval = rerouteInterval;
+    }
+
+    private void setEnabled(boolean enabled) {
+        this.enabled = enabled;
+    }
+
+    private void setLowWatermark(String lowWatermark) {
+        // Watermark is expressed in terms of used data, but we need "free" data watermark
+        this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark);
+        this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey());
+    }
+
+    private void setHighWatermark(String highWatermark) {
+        // Watermark is expressed in terms of used data, but we need "free" data watermark
+        this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark);
+        this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey());
     }

     // For Testing
@@ -582,18 +553,19 @@ public class DiskThresholdDecider extends AllocationDecider {
      * Checks if a watermark string is a valid percentage or byte size value,
-     * returning true if valid, false if invalid.
+     * returning the watermark value if valid, otherwise throwing an exception.
*/ - public boolean validWatermarkSetting(String watermark, String settingName) { + public static String validWatermarkSetting(String watermark, String settingName) { try { RatioValue.parseRatioValue(watermark); - return true; } catch (ElasticsearchParseException e) { try { ByteSizeValue.parseBytesSizeValue(watermark, settingName); - return true; } catch (ElasticsearchParseException ex) { - return false; + ex.addSuppressed(e); + throw ex; } } + return watermark; + } private Decision earlyTerminate(RoutingAllocation allocation, ImmutableOpenMap usages) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 0bbd4935044..38183fab830 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -23,14 +23,15 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Locale; /** - * This allocation decider allows shard allocations / rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE} / - * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}. + * This allocation decider allows shard allocations / rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / + * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}. * The per index settings overrides the cluster wide setting. * *
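The hunks below repeat the conversion pattern this series applies everywhere: a typed, dynamic Setting replaces the string key and the NodeSettingsService.Listener, the constructor reads the initial value, and a setter is registered for later updates. For reference, a minimal sketch of a component written against this API; FooService and cluster.foo.enabled are illustrative names, not part of this change:

    import org.elasticsearch.common.component.AbstractComponent;
    import org.elasticsearch.common.inject.Inject;
    import org.elasticsearch.common.settings.ClusterSettingsService;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class FooService extends AbstractComponent {

        // cluster-scoped boolean setting, default true; the second boolean marks it dynamic
        public static final Setting<Boolean> FOO_ENABLED_SETTING =
                Setting.boolSetting("cluster.foo.enabled", true, true, Setting.Scope.Cluster);

        private volatile boolean enabled;

        @Inject
        public FooService(Settings settings, ClusterSettingsService clusterSettingsService) {
            super(settings);
            // initial value from the node-level settings
            this.enabled = FOO_ENABLED_SETTING.get(settings);
            // consumer invoked when the setting is dynamically updated
            clusterSettingsService.addSettingsUpdateConsumer(FOO_ENABLED_SETTING, this::setEnabled);
        }

        private void setEnabled(boolean enabled) {
            this.enabled = enabled;
        }
    }
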

@@ -54,26 +55,34 @@ import java.util.Locale;
  * @see Rebalance
  * @see Allocation
  */
-public class EnableAllocationDecider extends AllocationDecider implements NodeSettingsService.Listener {
+public class EnableAllocationDecider extends AllocationDecider {

     public static final String NAME = "enable";

-    public static final String CLUSTER_ROUTING_ALLOCATION_ENABLE = "cluster.routing.allocation.enable";
-    public static final String INDEX_ROUTING_ALLOCATION_ENABLE = "index.routing.allocation.enable";
+    public static final Setting<Allocation> CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", "_na_", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.Cluster);
+    public static final String INDEX_ROUTING_ALLOCATION_ENABLE = "index.routing.allocation.enable";

-    public static final String CLUSTER_ROUTING_REBALANCE_ENABLE = "cluster.routing.rebalance.enable";
+    public static final Setting<Rebalance> CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", "_na_", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.Cluster);
     public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable";

     private volatile Rebalance enableRebalance;
     private volatile Allocation enableAllocation;

     @Inject
-    public EnableAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) {
+    public EnableAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) {
         super(settings);
-        this.enableAllocation = Allocation.parse(settings.get(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.ALL.name()));
-        this.enableRebalance = Rebalance.parse(settings.get(CLUSTER_ROUTING_REBALANCE_ENABLE, Rebalance.ALL.name()));
-        nodeSettingsService.addListener(this);
+        this.enableAllocation = CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.get(settings);
+        this.enableRebalance = CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.get(settings);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, this::setEnableAllocation);
+        clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, this::setEnableRebalance);
+    }
+
+    public void setEnableRebalance(Rebalance enableRebalance) {
+        this.enableRebalance = enableRebalance;
+    }
+
+    public void setEnableAllocation(Allocation enableAllocation) {
+        this.enableAllocation = enableAllocation;
     }

     @Override
@@ -148,25 +157,9 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSettingsService.Listener {
         }
     }

-    @Override
-    public void onRefreshSettings(Settings settings) {
-        final Allocation enable = Allocation.parse(settings.get(CLUSTER_ROUTING_ALLOCATION_ENABLE, this.enableAllocation.name()));
-        if (enable != this.enableAllocation) {
-            logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_ENABLE, this.enableAllocation, enable);
-            EnableAllocationDecider.this.enableAllocation = enable;
-        }
-
-        final Rebalance enableRebalance = Rebalance.parse(settings.get(CLUSTER_ROUTING_REBALANCE_ENABLE, this.enableRebalance.name()));
-        if (enableRebalance != this.enableRebalance) {
-            logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_REBALANCE_ENABLE, this.enableRebalance, enableRebalance);
-            EnableAllocationDecider.this.enableRebalance = enableRebalance;
-        }
-
-    }
-
     /**
      * Allocation values or rather their string representation to be used with
-     * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE}
+     * {@link
EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE} * via cluster / index settings. */ public enum Allocation { @@ -192,7 +185,7 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe /** * Rebalance values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} * via cluster / index settings. */ public enum Rebalance { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index e0e2caaf04a..f9ee7ad8182 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -25,10 +25,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; - -import java.util.Map; +import org.elasticsearch.common.settings.ClusterSettingsService; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; @@ -65,36 +64,23 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String INDEX_ROUTING_INCLUDE_GROUP = "index.routing.allocation.include."; public static final String INDEX_ROUTING_EXCLUDE_GROUP = "index.routing.allocation.exclude."; - public static final String CLUSTER_ROUTING_REQUIRE_GROUP = "cluster.routing.allocation.require."; - public static final String CLUSTER_ROUTING_INCLUDE_GROUP = "cluster.routing.allocation.include."; - public static final String CLUSTER_ROUTING_EXCLUDE_GROUP = "cluster.routing.allocation.exclude."; + public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.Cluster); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; private volatile DiscoveryNodeFilters clusterExcludeFilters; @Inject - public FilterAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public FilterAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - Map requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap(); - if (requireMap.isEmpty()) { - clusterRequireFilters = null; - } else { - clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); - } - Map includeMap = 
settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap(); - if (includeMap.isEmpty()) { - clusterIncludeFilters = null; - } else { - clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); - } - Map excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap(); - if (excludeMap.isEmpty()) { - clusterExcludeFilters = null; - } else { - clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); - } - nodeSettingsService.addListener(new ApplySettings()); + setClusterRequireFilters(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.get(settings)); + setClusterExcludeFilters(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.get(settings)); + setClusterIncludeFilters(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.get(settings)); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, this::setClusterRequireFilters); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, this::setClusterExcludeFilters); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, this::setClusterIncludeFilters); } @Override @@ -144,21 +130,13 @@ public class FilterAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - Map requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap(); - if (!requireMap.isEmpty()) { - clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); - } - Map includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap(); - if (!includeMap.isEmpty()) { - clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); - } - Map excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap(); - if (!excludeMap.isEmpty()) { - clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); - } - } + private void setClusterRequireFilters(Settings settings) { + clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, settings.getAsMap()); + } + private void setClusterIncludeFilters(Settings settings) { + clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, settings.getAsMap()); + } + private void setClusterExcludeFilters(Settings settings) { + clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, settings.getAsMap()); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 3d68ed50d27..ecd03d92c70 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -24,16 +24,16 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import 
org.elasticsearch.common.settings.ClusterSettingsService; /** * This {@link AllocationDecider} limits the number of shards per node on a per * index or node-wide basis. The allocator prevents a single node to hold more * than {@value #INDEX_TOTAL_SHARDS_PER_NODE} per index and - * {@value #CLUSTER_TOTAL_SHARDS_PER_NODE} globally during the allocation + * cluster.routing.allocation.total_shards_per_node globally during the allocation * process. The limits of this decider can be changed in real-time via a the * index settings API. *
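The FilterAllocationDecider hunks above show the other registration shape this series supports: group settings, where Setting.groupSetting matches every key under a prefix and hands the consumer a whole Settings subtree instead of a single typed value. A minimal sketch against the same API; TagAwareComponent and the cluster.example.tags. prefix are invented for illustration:

    import java.util.Map;

    import org.elasticsearch.common.settings.ClusterSettingsService;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class TagAwareComponent {

        // dynamic group setting: matches every key under the "cluster.example.tags." prefix
        public static final Setting<Settings> TAGS_GROUP_SETTING =
                Setting.groupSetting("cluster.example.tags.", true, Setting.Scope.Cluster);

        private volatile Map<String, String> tags;

        public TagAwareComponent(Settings settings, ClusterSettingsService clusterSettingsService) {
            setTags(TAGS_GROUP_SETTING.get(settings));
            clusterSettingsService.addSettingsUpdateConsumer(TAGS_GROUP_SETTING, this::setTags);
        }

        private void setTags(Settings tagSettings) {
            // the consumer receives all keys under the prefix as one Settings object
            this.tags = tagSettings.getAsMap();
        }
    }
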

@@ -64,26 +64,18 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. */ - public static final String CLUSTER_TOTAL_SHARDS_PER_NODE = "cluster.routing.allocation.total_shards_per_node"; + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.Cluster); - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - Integer newClusterLimit = settings.getAsInt(CLUSTER_TOTAL_SHARDS_PER_NODE, null); - - if (newClusterLimit != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_TOTAL_SHARDS_PER_NODE, - ShardsLimitAllocationDecider.this.clusterShardLimit, newClusterLimit); - ShardsLimitAllocationDecider.this.clusterShardLimit = newClusterLimit; - } - } - } @Inject - public ShardsLimitAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ShardsLimitAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - this.clusterShardLimit = settings.getAsInt(CLUSTER_TOTAL_SHARDS_PER_NODE, -1); - nodeSettingsService.addListener(new ApplySettings()); + this.clusterShardLimit = CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, this::setClusterShardLimit); + } + + private void setClusterShardLimit(int clusterShardLimit) { + this.clusterShardLimit = clusterShardLimit; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index 37b9f9f461b..fa447626e63 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -23,9 +23,11 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; /** * This {@link org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider} prevents shards that @@ -38,18 +40,7 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { /** * Disables relocation of shards that are currently being snapshotted. 
*/ - public static final String CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED = "cluster.routing.allocation.snapshot.relocation_enabled"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - boolean newEnableRelocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation); - if (newEnableRelocation != enableRelocation) { - logger.info("updating [{}] from [{}], to [{}]", CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation, newEnableRelocation); - enableRelocation = newEnableRelocation; - } - } - } + public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, Setting.Scope.Cluster); private volatile boolean enableRelocation = false; @@ -66,14 +57,18 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { * @param settings {@link org.elasticsearch.common.settings.Settings} to use */ public SnapshotInProgressAllocationDecider(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); } @Inject - public SnapshotInProgressAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public SnapshotInProgressAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - enableRelocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation); - nodeSettingsService.addListener(new ApplySettings()); + enableRelocation = CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, this::setEnableRelocation); + } + + private void setEnableRelocation(boolean enableRelocation) { + this.enableRelocation = enableRelocation; } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index ed6814d83af..543f0bf780b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -21,11 +21,11 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; /** * {@link ThrottlingAllocationDecider} controls the recovery process per node in @@ -47,27 +47,33 @@ import org.elasticsearch.node.settings.NodeSettingsService; */ public class ThrottlingAllocationDecider extends AllocationDecider { - public static final String NAME = "throttling"; - - public static final String 
CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = "cluster.routing.allocation.node_initial_primaries_recoveries"; - public static final String CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = "cluster.routing.allocation.node_concurrent_recoveries"; - public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; - public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = 2; public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = 4; + public static final String NAME = "throttling"; + public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; + + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", "_na_", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.Cluster); private volatile int primariesInitialRecoveries; private volatile int concurrentRecoveries; @Inject - public ThrottlingAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ThrottlingAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - - this.primariesInitialRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES); - this.concurrentRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)); + this.primariesInitialRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.get(settings); + this.concurrentRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.get(settings); logger.debug("using node_concurrent_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentRecoveries, primariesInitialRecoveries); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, this::setPrimariesInitialRecoveries); + clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, this::setConcurrentRecoveries); + } - nodeSettingsService.addListener(new ApplySettings()); + private void setConcurrentRecoveries(int concurrentRecoveries) { + this.concurrentRecoveries = concurrentRecoveries; + } + + private void setPrimariesInitialRecoveries(int primariesInitialRecoveries) { + this.primariesInitialRecoveries = primariesInitialRecoveries; } @Override @@ -115,21 +121,4 @@ public class ThrottlingAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "below shard recovery limit of [%d]", concurrentRecoveries); } } - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int primariesInitialRecoveries 
= settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, ThrottlingAllocationDecider.this.primariesInitialRecoveries); - if (primariesInitialRecoveries != ThrottlingAllocationDecider.this.primariesInitialRecoveries) { - logger.info("updating [cluster.routing.allocation.node_initial_primaries_recoveries] from [{}] to [{}]", ThrottlingAllocationDecider.this.primariesInitialRecoveries, primariesInitialRecoveries); - ThrottlingAllocationDecider.this.primariesInitialRecoveries = primariesInitialRecoveries; - } - - int concurrentRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, ThrottlingAllocationDecider.this.concurrentRecoveries); - if (concurrentRecoveries != ThrottlingAllocationDecider.this.concurrentRecoveries) { - logger.info("updating [cluster.routing.allocation.node_concurrent_recoveries] from [{}] to [{}]", ThrottlingAllocationDecider.this.concurrentRecoveries, concurrentRecoveries); - ThrottlingAllocationDecider.this.concurrentRecoveries = concurrentRecoveries; - } - } - } } diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index d4b15861846..56bf400d46b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.transport.TransportAddress; @@ -46,7 +47,7 @@ import org.elasticsearch.common.util.concurrent.*; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -62,7 +63,7 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF */ public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { - public static final String SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD = "cluster.service.slow_task_logging_threshold"; + public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); public static final String SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL = "cluster.service.reconnect_interval"; public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; @@ -74,7 +75,7 @@ public class InternalClusterService extends AbstractLifecycleComponent> groupSettings = new HashMap<>(); + private final Map> keySettings = new HashMap<>(); + private final Settings defaults; + + public ClusterSettings(Set> settingsSet) { + Settings.Builder builder = Settings.builder(); + for (Setting entry : settingsSet) { + if (entry.getScope() != Setting.Scope.Cluster) { + throw new IllegalArgumentException("Setting must be a cluster setting but 
was: " + entry.getScope()); + } + if (entry.isGroupSetting()) { + groupSettings.put(entry.getKey(), entry); + } else { + keySettings.put(entry.getKey(), entry); + } + builder.put(entry.getKey(), entry.getDefault(Settings.EMPTY)); + } + this.defaults = builder.build(); + } + + public ClusterSettings() { + this(BUILT_IN_CLUSTER_SETTINGS); + } + + /** + * Returns the {@link Setting} for the given key or null if the setting can not be found. + */ + public Setting get(String key) { + Setting setting = keySettings.get(key); + if (setting == null) { + for (Map.Entry> entry : groupSettings.entrySet()) { + if (entry.getValue().match(key)) { + return entry.getValue(); + } + } + } else { + return setting; + } + return null; + } + + /** + * Returns true if the setting for the given key is dynamically updateable. Otherwise false. + */ + public boolean hasDynamicSetting(String key) { + final Setting setting = get(key); + return setting != null && setting.isDynamic(); + } + + /** + * Returns true if the settings is a logger setting. + */ + public boolean isLoggerSetting(String key) { + return key.startsWith("logger."); + } + + /** + * Returns the cluster settings defaults + */ + public Settings getDefaults() { + return defaults; + } + + public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, + AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, + BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.THRESHOLD_SETTING, + ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, + ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, + ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, + IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, + MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, + MetaData.SETTING_READ_ONLY_SETTING, + RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, + RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS_SETTING, + RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, + RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING, + RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, + RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, + RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, + ThreadPool.THREADPOOL_GROUP_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, + 
DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, + SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, + DestructiveOperations.REQUIRES_NAME_SETTING, + DiscoverySettings.PUBLISH_TIMEOUT_SETTING, + DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, + DiscoverySettings.COMMIT_TIMEOUT_SETTING, + DiscoverySettings.NO_MASTER_BLOCK_SETTING, + HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, + InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, + TransportService.TRACE_LOG_EXCLUDE_SETTING, + TransportService.TRACE_LOG_INCLUDE_SETTING, + TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, + ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING))); +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java new file mode 100644 index 00000000000..e66844d4e88 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Predicate; + +/** + * A service that allows to register for node settings change that can come from cluster + * events holding new settings. 
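For illustration, the registry above and the service declared below fit together roughly like this; the variable names are made up, but the calls are the ones introduced in this diff:

    // Query the built-in registry for a setting and its runtime capabilities.
    ClusterSettings clusterSettings = new ClusterSettings(); // seeded with BUILT_IN_CLUSTER_SETTINGS
    Setting publishTimeout = clusterSettings.get("discovery.zen.publish_timeout");
    boolean dynamic = clusterSettings.hasDynamicSetting("discovery.zen.publish_timeout"); // -> true
    Settings defaults = clusterSettings.getDefaults(); // string defaults of every registered setting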
+ */ +public final class ClusterSettingsService extends SettingsService { + private final ClusterSettings clusterSettings; + + @Inject + public ClusterSettingsService(Settings settings, ClusterSettings clusterSettings) { + super(settings); + this.clusterSettings = clusterSettings; + } + + protected Setting getSetting(String key) { + return this.clusterSettings.get(key); + } + + public ClusterSettings getClusterSettings() { + return clusterSettings; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java new file mode 100644 index 00000000000..169867ef384 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -0,0 +1,394 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.*; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Predicate; + +/** + */ +public class Setting extends ToXContentToBytes { + private final String key; + private final String description; + private final Function defaultValue; + private final Function parser; + private final boolean dynamic; + private final Scope scope; + + public Setting(String key, String description, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + this.key = key; + this.description = description; + this.defaultValue = defaultValue; + this.parser = parser; + this.dynamic = dynamic; + this.scope = scope; + } + + /** + * Returns the settings key or a prefix if this setting is a group setting + * @see #isGroupSetting() + */ + public String getKey() { + return key; + } + + /** + * Returns a human readable description of this setting + */ + public String getDescription() { + return description; + } + + /** + * Returns true iff this setting is dynamically updateable, otherwise false + */ + public boolean isDynamic() { + return dynamic; + } + + /** + * Returns the settings scope + */ + public Scope getScope() { + return scope; + } + + /** + * Returns true iff this setting is a group setting. Group settings represent a set of settings + * rather than a single value. 
The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like cluster.store. + * that matches all settings with this prefix. + */ + public boolean isGroupSetting() { + return false; + } + + /** + * Returns the default values string representation for this setting. + * @param settings a settings object for settings that has a default value depending on another setting if available + */ + public String getDefault(Settings settings) { + return defaultValue.apply(settings); + } + + /** + * Returns true iff this setting is present in the given settings object. Otherwise false + */ + public boolean exists(Settings settings) { + return settings.get(key) != null; + } + + /** + * Returns the settings value. If the setting is not present in the given settings object the default value is returned + * instead. + */ + public T get(Settings settings) { + String value = getRaw(settings); + try { + return parser.apply(value); + } catch (ElasticsearchParseException ex) { + throw ex; + } catch (Throwable t) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", t); + } + } + + /** + * Returns the raw (string) settings value. If the setting is not present in the given settings object the default value is returned + * instead. This is useful if the value can't be parsed due to an invalid value to access the actual value. + */ + public String getRaw(Settings settings) { + return settings.get(key, defaultValue.apply(settings)); + } + + /** + * Returns true iff the given key matches the settings key or if this setting is a group setting if the + * given key is part of the settings group. + * @see #isGroupSetting() + */ + public boolean match(String toTest) { + return Regex.simpleMatch(key, toTest); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("key", key); + builder.field("description", description); + builder.field("type", scope.name()); + builder.field("dynamic", dynamic); + builder.field("default", defaultValue.apply(Settings.EMPTY)); + builder.endObject(); + return builder; + } + + /** + * The settings scope - settings can either be cluster settings or per index settings. 
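As a sketch of the declaration pattern this enables (the key and default below are invented; the factory methods used are defined further down in this file):

    // A dynamic integer setting, registered at cluster scope, defaulting to 10.
    public static final Setting MAX_WIDGETS_SETTING =
        Setting.intSetting("my.plugin.max_widgets", 10, true, Setting.Scope.Cluster);

    int maxWidgets = MAX_WIDGETS_SETTING.get(Settings.EMPTY);        // nothing configured -> 10
    boolean configured = MAX_WIDGETS_SETTING.exists(Settings.EMPTY); // -> false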
+ */ + public enum Scope { + Cluster, + Index; + } + + SettingsService.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { + return newUpdater(consumer, logger, settings, (s) -> true); + } + + SettingsService.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + if (isDynamic()) { + return new Updater(consumer, logger, settings, accept); + } else { + throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); + } + } + + static SettingsService.SettingUpdater compoundUpdater(final BiConsumer consumer, final Setting aSettting, final Setting bSetting, ESLogger logger, Settings settings) { + final AtomicReference aRef = new AtomicReference<>(); + final AtomicReference bRef = new AtomicReference<>(); + final SettingsService.SettingUpdater aSettingUpdater = aSettting.newUpdater(aRef::set, logger, settings); + final SettingsService.SettingUpdater bSettingUpdater = bSetting.newUpdater(bRef::set, logger, settings); + return new SettingsService.SettingUpdater() { + boolean aHasChanged = false; + boolean bHasChanged = false; + @Override + public boolean prepareApply(Settings settings) { + aHasChanged = aSettingUpdater.prepareApply(settings); + bHasChanged = bSettingUpdater.prepareApply(settings); + return aHasChanged || bHasChanged; + } + + @Override + public void apply() { + aSettingUpdater.apply(); + bSettingUpdater.apply(); + if (aHasChanged || bHasChanged) { + consumer.accept(aRef.get(), bRef.get()); + } + } + + @Override + public void rollback() { + try { + aRef.set(null); + aSettingUpdater.rollback(); + } finally { + bRef.set(null); + bSettingUpdater.rollback(); + } + } + + @Override + public String toString() { + return "CompoundUpdater for: " + aSettingUpdater + " and " + bSettingUpdater; + } + }; + } + + + private class Updater implements SettingsService.SettingUpdater { + private final Consumer consumer; + private final ESLogger logger; + private final Predicate accept; + private String value; + private boolean commitPending; + private String pendingValue; + private T valueInstance; + + public Updater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + this.consumer = consumer; + this.logger = logger; + value = getRaw(settings); + this.accept = accept; + } + + + public boolean prepareApply(Settings settings) { + String newValue = settings.get(key); + if (newValue == null) { + newValue = getRaw(settings); + } + if (value.equals(newValue) == false) { + T inst = get(settings); + if (accept.test(inst) == false) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + getRaw(settings) + "]"); + } + logger.info("update [{}] from [{}] to [{}]", key, value, getRaw(settings)); + pendingValue = newValue; + valueInstance = inst; + commitPending = true; + + } else { + commitPending = false; + } + return commitPending; + } + + public void apply() { + if (commitPending) { + value = pendingValue; + consumer.accept(valueInstance); + } + commitPending = false; + valueInstance = null; + pendingValue = null; + } + + public void rollback() { + commitPending = false; + valueInstance = null; + pendingValue = null; + } + + @Override + public String toString() { + return "Updater for: " + Setting.this.toString(); + } + } + + + public Setting(String key, String description, String defaultValue, Function parser, boolean dynamic, Scope scope) { + this(key, description, (s) -> defaultValue, parser, dynamic, scope); + } + + public static Setting 
floatSetting(String key, float defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); + } + + public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Integer.toString(defaultValue), Integer::parseInt, dynamic, scope); + } + + public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); + } + + public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); + } + + public static Setting byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> value.toString(), (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope); + } + + public static Setting positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { + return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, scope); + } + + public static Setting groupSetting(String key, boolean dynamic, Scope scope) { + String prefix = key.endsWith(".") ? key : key + "."; + return new Setting(key, "_na_", "", (s) -> null, dynamic, scope) { + + @Override + public boolean isGroupSetting() { + return true; + } + + @Override + public Settings get(Settings settings) { + return settings.getByPrefix(prefix); + } + + @Override + public boolean match(String toTest) { + return Regex.simpleMatch(prefix + "*", toTest); + } + + @Override + public SettingsService.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + if (isDynamic() == false) { + throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); + } + final Setting setting = this; + return new SettingsService.SettingUpdater() { + private Settings pendingSettings; + private Settings committedSettings = get(settings); + + @Override + public boolean prepareApply(Settings settings) { + Settings currentSettings = get(settings); + if (currentSettings.equals(committedSettings) == false) { + if (accept.test(currentSettings) == false) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + committedSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]"); + } + pendingSettings = currentSettings; + return true; + } else { + return false; + } + } + + @Override + public void apply() { + if (pendingSettings != null) { + consumer.accept(pendingSettings); + committedSettings = pendingSettings; + } + pendingSettings = null; + } + + @Override + public void rollback() { + pendingSettings = null; + } + + @Override + public String toString() { + return "Updater for: " + setting.toString(); + } + }; + } + }; + } + + public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", defaultValue, (s) -> { + TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); + if (timeValue.millis() < minValue.millis()) { + throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return timeValue; + }, dynamic, scope); + } + + public 
static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { + return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, dynamic, scope); + } + + public static Setting timeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope); + } + + public static Setting nonNegativeDouble(String key, double defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Double.toString(defaultValue), Double::parseDouble, dynamic, scope); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 5e083a9e740..aae4cb2b54d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -614,6 +614,9 @@ public final class Settings implements ToXContent { if (settingPrefix.charAt(settingPrefix.length() - 1) != '.') { settingPrefix = settingPrefix + "."; } + return getGroupsInternal(settingPrefix, ignoreNonGrouped); + } + private Map getGroupsInternal(String settingPrefix, boolean ignoreNonGrouped) throws SettingsException { // we don't really care that it might happen twice Map> map = new LinkedHashMap<>(); for (Object o : settings.keySet()) { @@ -643,6 +646,12 @@ public final class Settings implements ToXContent { } return Collections.unmodifiableMap(retVal); } + /** + * Returns group settings for the given setting prefix. + */ + public Map getAsGroups() throws SettingsException { + return getGroupsInternal("", false); + } /** * Returns a parsed version. diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java new file mode 100644 index 00000000000..b1a1319e800 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java @@ -0,0 +1,201 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Predicate; + +/** + * A basic setting service that can be used for per-index and per-cluster settings. + * This service offers transactional application of updates settings. 
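The two-phase contract of the class below, sketched against a concrete update (the service variable is hypothetical; the key is one of the dynamic cluster settings converted in this patch):

    Settings update = Settings.builder()
        .put("discovery.zen.publish_timeout", "10s")
        .build();
    // dryRun() calls prepareApply() on every registered updater and then rolls all of
    // them back: it validates the update without changing any live value.
    clusterSettingsService.dryRun(update);
    // applySettings() first validates via prepareApply() on all updaters, then commits
    // via apply(); if any validation fails, every updater is rolled back and nothing
    // is applied.
    clusterSettingsService.applySettings(update);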
+ */ +public abstract class SettingsService extends AbstractComponent { + private Settings lastSettingsApplied; + private final List settingUpdaters = new ArrayList<>(); + + protected SettingsService(Settings settings) { + super(settings); + } + + /** + * Applies the given settings to all listeners and rolls back the result after application. This + * method will not change any settings but will fail if any of the settings can't be applied. + */ + public synchronized Settings dryRun(Settings settings) { + final Settings build = Settings.builder().put(this.settings).put(settings).build(); + try { + List exceptions = new ArrayList<>(); + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + settingUpdater.prepareApply(build); + } catch (RuntimeException ex) { + exceptions.add(ex); + logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater); + } + } + // here we are exhaustive and record all settings that failed. + ExceptionsHelper.rethrowAndSuppress(exceptions); + } finally { + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + settingUpdater.rollback(); + } catch (Exception e) { + logger.warn("failed to rollback settings for [{}]", e, settingUpdater); + } + } + } + return build; + } + + /** + * Applies the given settings to all the settings consumers or to none of them. The settings + * will be merged with the node settings before they are applied while given settings override existing node + * settings. + * @param settings the settings to apply + * @return the unmerged applied settings + */ + public synchronized Settings applySettings(Settings settings) { + if (lastSettingsApplied != null && settings.equals(lastSettingsApplied)) { + // nothing changed in the settings, ignore + return settings; + } + final Settings build = Settings.builder().put(this.settings).put(settings).build(); + boolean success = false; + try { + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + settingUpdater.prepareApply(build); + } catch (Exception ex) { + logger.warn("failed to prepareCommit settings for [{}]", ex, settingUpdater); + throw ex; + } + } + for (SettingUpdater settingUpdater : settingUpdaters) { + settingUpdater.apply(); + } + success = true; + } catch (Exception ex) { + logger.warn("failed to apply settings", ex); + throw ex; + } finally { + if (success == false) { + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + settingUpdater.rollback(); + } catch (Exception e) { + logger.warn("failed to refresh settings for [{}]", e, settingUpdater); + } + } + } + } + + try { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + if (entry.getKey().startsWith("logger.")) { + String component = entry.getKey().substring("logger.".length()); + if ("_root".equals(component)) { + ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); + } else { + ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); + } + } + } + } catch (Exception e) { + logger.warn("failed to refresh settings for [{}]", e, "logger"); + } + + return lastSettingsApplied = settings; + } + + /** + * Adds a settings consumer with a predicate that is only evaluated at update time. + *
+ * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + *
+ */ + public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Predicate predicate) { + if (setting != getSetting(setting.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); + } + this.settingUpdaters.add(setting.newUpdater(consumer, logger, settings, predicate)); + } + + /** + * Adds a settings consumer that accepts the values for two settings. The consumer is only notified if one or both settings change. + *
+ * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + *
+ */ + public synchronized void addSettingsUpdateConsumer(Setting
a, Setting b, BiConsumer consumer) { + if (a != getSetting(a.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + a.getKey() + "]"); + } + if (b != getSetting(b.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]"); + } + this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger, settings)); + } + + /** + * Adds a settings consumer. + *
+ * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + *
+ */ + public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer) { + addSettingsUpdateConsumer(setting, consumer, (s) -> true); + } + + protected abstract Setting getSetting(String key); + + /** + * Transactional interface to update settings. + * @see Setting + */ + public interface SettingUpdater { + /** + * Prepares applying the given settings to this updater. All the heavy lifting like parsing and validation + * happens in this method. Yet the actual setting should not be changed by this call. + * @param settings the settings to apply + * @return true if this updater will update a setting on calling {@link #apply()} otherwise false + */ + boolean prepareApply(Settings settings); + + /** + * Applies the settings passed to {@link #prepareApply(Settings)} + */ + void apply(); + + /** + * Rolls back to the state before {@link #prepareApply(Settings)} was called. All internal prepared state is cleared after this call. + */ + void rollback(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index ee6371605ee..fb44c7dc9a5 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -229,6 +229,30 @@ public class TimeValue implements Streamable { return Strings.format1Decimals(value, suffix); } + public String getStringRep() { + if (duration < 0) { + return Long.toString(duration); + } + switch (timeUnit) { + case NANOSECONDS: + return Strings.format1Decimals(duration, "nanos"); + case MICROSECONDS: + return Strings.format1Decimals(duration, "micros"); + case MILLISECONDS: + return Strings.format1Decimals(duration, "ms"); + case SECONDS: + return Strings.format1Decimals(duration, "s"); + case MINUTES: + return Strings.format1Decimals(duration, "m"); + case HOURS: + return Strings.format1Decimals(duration, "h"); + case DAYS: + return Strings.format1Decimals(duration, "d"); + default: + throw new IllegalArgumentException("unknown time unit: " + timeUnit.name()); + } + } + public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) { settingName = Objects.requireNonNull(settingName); assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName; diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 20f2c96b120..829a173060d 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -23,9 +23,10 @@ import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.rest.RestStatus; import java.util.EnumSet; @@ -39,38 +40,38 @@ public class DiscoverySettings extends AbstractComponent { * sets the timeout for a complete publishing cycle, including both sending and committing. 
the master * will continute to process the next cluster state update after this time has elapsed **/ - public static final String PUBLISH_TIMEOUT = "discovery.zen.publish_timeout"; + public static final Setting PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing * to receive responses within this window will cause the cluster state change to be rejected. */ - public static final String COMMIT_TIMEOUT = "discovery.zen.commit_timeout"; - public static final String NO_MASTER_BLOCK = "discovery.zen.no_master_block"; - public static final String PUBLISH_DIFF_ENABLE = "discovery.zen.publish_diff.enable"; + public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", "_na_", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.Cluster); + public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "_na_", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.Cluster); + public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.Cluster); - public static final TimeValue DEFAULT_PUBLISH_TIMEOUT = TimeValue.timeValueSeconds(30); - public static final TimeValue DEFAULT_COMMIT_TIMEOUT = TimeValue.timeValueSeconds(30); - public static final String DEFAULT_NO_MASTER_BLOCK = "write"; public final static int NO_MASTER_BLOCK_ID = 2; - public final static boolean DEFAULT_PUBLISH_DIFF_ENABLE = true; public final static ClusterBlock NO_MASTER_BLOCK_ALL = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); public final static ClusterBlock NO_MASTER_BLOCK_WRITES = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, false, RestStatus.SERVICE_UNAVAILABLE, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); private volatile ClusterBlock noMasterBlock; private volatile TimeValue publishTimeout; + private volatile TimeValue commitTimeout; private volatile boolean publishDiff; @Inject - public DiscoverySettings(Settings settings, NodeSettingsService nodeSettingsService) { + public DiscoverySettings(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - nodeSettingsService.addListener(new ApplySettings()); - this.noMasterBlock = parseNoMasterBlock(settings.get(NO_MASTER_BLOCK, DEFAULT_NO_MASTER_BLOCK)); - this.publishTimeout = settings.getAsTime(PUBLISH_TIMEOUT, DEFAULT_PUBLISH_TIMEOUT); - this.commitTimeout = settings.getAsTime(COMMIT_TIMEOUT, new TimeValue(Math.min(DEFAULT_COMMIT_TIMEOUT.millis(), publishTimeout.millis()))); - this.publishDiff = settings.getAsBoolean(PUBLISH_DIFF_ENABLE, DEFAULT_PUBLISH_DIFF_ENABLE); + clusterSettingsService.addSettingsUpdateConsumer(NO_MASTER_BLOCK_SETTING, this::setNoMasterBlock); + clusterSettingsService.addSettingsUpdateConsumer(PUBLISH_DIFF_ENABLE_SETTING, this::setPublishDiff); + clusterSettingsService.addSettingsUpdateConsumer(COMMIT_TIMEOUT_SETTING, this::setCommitTimeout); + clusterSettingsService.addSettingsUpdateConsumer(PUBLISH_TIMEOUT_SETTING, this::setPublishTimeout); + this.noMasterBlock = NO_MASTER_BLOCK_SETTING.get(settings); + this.publishTimeout = 
PUBLISH_TIMEOUT_SETTING.get(settings); + this.commitTimeout = COMMIT_TIMEOUT_SETTING.get(settings); + this.publishDiff = PUBLISH_DIFF_ENABLE_SETTING.get(settings); } /** @@ -88,47 +89,25 @@ public class DiscoverySettings extends AbstractComponent { return noMasterBlock; } - public boolean getPublishDiff() { return publishDiff;} - - private class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - TimeValue newPublishTimeout = settings.getAsTime(PUBLISH_TIMEOUT, null); - if (newPublishTimeout != null) { - if (newPublishTimeout.millis() != publishTimeout.millis()) { - logger.info("updating [{}] from [{}] to [{}]", PUBLISH_TIMEOUT, publishTimeout, newPublishTimeout); - publishTimeout = newPublishTimeout; - if (settings.getAsTime(COMMIT_TIMEOUT, null) == null && commitTimeout.millis() > publishTimeout.millis()) { - logger.info("reducing default [{}] to [{}] due to publish timeout change", COMMIT_TIMEOUT, publishTimeout); - commitTimeout = publishTimeout; - } - } - } - TimeValue newCommitTimeout = settings.getAsTime(COMMIT_TIMEOUT, null); - if (newCommitTimeout != null) { - if (newCommitTimeout.millis() != commitTimeout.millis()) { - logger.info("updating [{}] from [{}] to [{}]", COMMIT_TIMEOUT, commitTimeout, newCommitTimeout); - commitTimeout = newCommitTimeout; - } - } - String newNoMasterBlockValue = settings.get(NO_MASTER_BLOCK); - if (newNoMasterBlockValue != null) { - ClusterBlock newNoMasterBlock = parseNoMasterBlock(newNoMasterBlockValue); - if (newNoMasterBlock != noMasterBlock) { - noMasterBlock = newNoMasterBlock; - } - } - Boolean newPublishDiff = settings.getAsBoolean(PUBLISH_DIFF_ENABLE, null); - if (newPublishDiff != null) { - if (newPublishDiff != publishDiff) { - logger.info("updating [{}] from [{}] to [{}]", PUBLISH_DIFF_ENABLE, publishDiff, newPublishDiff); - publishDiff = newPublishDiff; - } - } - } + private void setNoMasterBlock(ClusterBlock noMasterBlock) { + this.noMasterBlock = noMasterBlock; } - private ClusterBlock parseNoMasterBlock(String value) { + private void setPublishDiff(boolean publishDiff) { + this.publishDiff = publishDiff; + } + + private void setPublishTimeout(TimeValue publishTimeout) { + this.publishTimeout = publishTimeout; + } + + private void setCommitTimeout(TimeValue commitTimeout) { + this.commitTimeout = commitTimeout; + } + + public boolean getPublishDiff() { return publishDiff;} + + private static ClusterBlock parseNoMasterBlock(String value) { switch (value) { case "all": return NO_MASTER_BLOCK_ALL; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 03111d141ef..7333618aef6 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -55,7 +56,7 @@ import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import 
org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; @@ -74,7 +75,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public class ZenDiscovery extends AbstractLifecycleComponent implements Discovery, PingContextProvider { - public final static String SETTING_REJOIN_ON_MASTER_GONE = "discovery.zen.rejoin_on_master_gone"; + public final static Setting REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.Cluster); public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout"; public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout"; public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts"; @@ -139,7 +140,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen @Inject public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, - TransportService transportService, final ClusterService clusterService, NodeSettingsService nodeSettingsService, + TransportService transportService, final ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, ElectMasterService electMasterService, DiscoverySettings discoverySettings) { super(settings); @@ -160,7 +161,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen this.masterElectionFilterClientNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_CLIENT, true); this.masterElectionFilterDataNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_DATA, false); this.masterElectionWaitForJoinsTimeout = settings.getAsTime(SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT, TimeValue.timeValueMillis(joinTimeout.millis() / 2)); - this.rejoinOnMasterGone = settings.getAsBoolean(SETTING_REJOIN_ON_MASTER_GONE, true); + this.rejoinOnMasterGone = REJOIN_ON_MASTER_GONE_SETTING.get(settings); if (this.joinRetryAttempts < 1) { throw new IllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number. 
got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]"); @@ -171,7 +172,15 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettingsService.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { + final ClusterState clusterState = clusterService.state(); + int masterNodes = clusterState.nodes().masterNodes().size(); + if (value > masterNodes) { + throw new IllegalArgumentException("cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current master nodes count [" + masterNodes + "]"); + } + return true; + }); + clusterSettingsService.addSettingsUpdateConsumer(REJOIN_ON_MASTER_GONE_SETTING, this::setRejoingOnMasterGone); this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, clusterName, clusterService); this.masterFD.addListener(new MasterNodeFailureListener()); @@ -306,6 +315,10 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return clusterJoinsCounter.get() > 0; } + private void setRejoingOnMasterGone(boolean rejoin) { + this.rejoinOnMasterGone = rejoin; + } + /** end of {@link org.elasticsearch.discovery.zen.ping.PingContextProvider } implementation */ @@ -1139,26 +1152,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int minimumMasterNodes = settings.getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, - ZenDiscovery.this.electMaster.minimumMasterNodes()); - if (minimumMasterNodes != ZenDiscovery.this.electMaster.minimumMasterNodes()) { - logger.info("updating {} from [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, - ZenDiscovery.this.electMaster.minimumMasterNodes(), minimumMasterNodes); - handleMinimumMasterNodesChanged(minimumMasterNodes); - } - - boolean rejoinOnMasterGone = settings.getAsBoolean(SETTING_REJOIN_ON_MASTER_GONE, ZenDiscovery.this.rejoinOnMasterGone); - if (rejoinOnMasterGone != ZenDiscovery.this.rejoinOnMasterGone) { - logger.info("updating {} from [{}] to [{}]", SETTING_REJOIN_ON_MASTER_GONE, ZenDiscovery.this.rejoinOnMasterGone, rejoinOnMasterGone); - ZenDiscovery.this.rejoinOnMasterGone = rejoinOnMasterGone; - } - } - } - - /** * All control of the join thread should happen under the cluster state update task thread. 
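Worth noting in the ZenDiscovery hunk above: the predicate overload lets a consumer reject an update before anything is committed. A condensed sketch of that registration (the actual diff throws an IllegalArgumentException with a descriptive message rather than returning false):

    // The predicate is evaluated during prepareApply(); a false result (or a thrown
    // exception) fails the whole transactional settings update.
    clusterSettingsService.addSettingsUpdateConsumer(
        ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING,
        this::handleMinimumMasterNodesChanged,
        (value) -> value <= clusterService.state().nodes().masterNodes().size());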
* This is important to make sure that the background joining process is always in sync with any cluster state updates diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index 9164a85388a..3ba338b4070 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -22,11 +22,10 @@ package org.elasticsearch.discovery.zen.elect; import com.carrotsearch.hppc.ObjectContainer; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -41,23 +40,7 @@ import java.util.List; */ public class ElectMasterService extends AbstractComponent { - public static final String DISCOVERY_ZEN_MINIMUM_MASTER_NODES = "discovery.zen.minimum_master_nodes"; - public static final Validator DISCOVERY_ZEN_MINIMUM_MASTER_NODES_VALIDATOR = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - int intValue; - try { - intValue = Integer.parseInt(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - int masterNodes = clusterState.nodes().masterNodes().size(); - if (intValue > masterNodes) { - return "cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES + " to more than the current master nodes count [" + masterNodes + "]"; - } - return null; - } - }; + public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, Setting.Scope.Cluster); // This is the minimum version a master needs to be on, otherwise it gets ignored // This is based on the minimum compatible version of the current version this node is on @@ -70,7 +53,7 @@ public class ElectMasterService extends AbstractComponent { public ElectMasterService(Settings settings, Version version) { super(settings); this.minMasterVersion = version.minimumCompatibilityVersion(); - this.minimumMasterNodes = settings.getAsInt(DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1); + this.minimumMasterNodes = DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings); logger.debug("using minimum_master_nodes [{}]", minimumMasterNodes); } diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index e83ec695a96..5e410fb6d53 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -227,7 +227,7 @@ public class GatewayService extends AbstractLifecycleComponent i // automatically generate a UID for the metadata if we need to metaDataBuilder.generateClusterUuidIfNeeded(); - if (recoveredState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || currentState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false)) { + if (MetaData.SETTING_READ_ONLY_SETTING.get(recoveredState.metaData().settings()) || 
MetaData.SETTING_READ_ONLY_SETTING.get(currentState.metaData().settings())) { blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); } diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 1bd023abdb0..48f24613f6f 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -21,37 +21,36 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.node.settings.NodeSettingsService; /** * IndexStoreConfig encapsulates node / cluster level configuration for index level {@link IndexStore} instances. * For instance it maintains the node level rate limiter configuration: updates to the cluster that disable or enable - * {@value #INDICES_STORE_THROTTLE_TYPE} or {@value #INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC} are reflected immediately + * indices.store.throttle.type or indices.store.throttle.max_bytes_per_sec are reflected immediately * on all referencing {@link IndexStore} instances */ -public class IndexStoreConfig implements NodeSettingsService.Listener { +public class IndexStoreConfig{ /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ - public static final String INDICES_STORE_THROTTLE_TYPE = "indices.store.throttle.type"; + public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", "_na_", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.Cluster); /** * Configures the node / cluster level throttle intensity. 
The default is 10240 MB */ - public static final String INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC = "indices.store.throttle.max_bytes_per_sec"; - private volatile String rateLimitingType; + public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.Cluster); + private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); private final ESLogger logger; public IndexStoreConfig(Settings settings) { logger = Loggers.getLogger(IndexStoreConfig.class, settings); // we don't limit by default (we default to CMS's auto throttle instead): - this.rateLimitingType = settings.get("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name()); + this.rateLimitingType = INDICES_STORE_THROTTLE_TYPE_SETTING.get(settings); rateLimiting.setType(rateLimitingType); - this.rateLimitingThrottle = settings.getAsBytesSize("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0)); + this.rateLimitingThrottle = INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.get(settings); rateLimiting.setMaxRate(rateLimitingThrottle); logger.debug("using indices.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimitingType, rateLimitingThrottle); } @@ -63,22 +62,12 @@ public class IndexStoreConfig implements NodeSettingsService.Listener { return rateLimiting; } - @Override - public void onRefreshSettings(Settings settings) { - String rateLimitingType = settings.get(INDICES_STORE_THROTTLE_TYPE, this.rateLimitingType); - // try and parse the type - StoreRateLimiting.Type.fromString(rateLimitingType); - if (!rateLimitingType.equals(this.rateLimitingType)) { - logger.info("updating indices.store.throttle.type from [{}] to [{}]", this.rateLimitingType, rateLimitingType); - this.rateLimitingType = rateLimitingType; - this.rateLimiting.setType(rateLimitingType); - } + public void setRateLimitingType(StoreRateLimiting.Type rateLimitingType) { + this.rateLimitingType = rateLimitingType; + rateLimiting.setType(rateLimitingType); + } - ByteSizeValue rateLimitingThrottle = settings.getAsBytesSize(INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, this.rateLimitingThrottle); - if (!rateLimitingThrottle.equals(this.rateLimitingThrottle)) { - logger.info("updating indices.store.throttle.max_bytes_per_sec from [{}] to [{}], note, type is [{}]", this.rateLimitingThrottle, rateLimitingThrottle, this.rateLimitingType); - this.rateLimitingThrottle = rateLimitingThrottle; - this.rateLimiting.setMaxRate(rateLimitingThrottle); - } + public void setRateLimitingThrottle(ByteSizeValue rateLimitingThrottle) { + this.rateLimitingThrottle = rateLimitingThrottle; } } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index dead72aee8b..ad98e3e9a8f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -58,7 +58,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import 
org.elasticsearch.plugins.PluginsService; import java.io.IOException; @@ -100,9 +100,9 @@ public class IndicesService extends AbstractLifecycleComponent i @Inject public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, - NodeSettingsService nodeSettingsService, AnalysisRegistry analysisRegistry, - IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, MapperRegistry mapperRegistry) { + ClusterSettingsService clusterSettingsService, AnalysisRegistry analysisRegistry, + IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, MapperRegistry mapperRegistry) { super(settings); this.pluginsService = pluginsService; this.nodeEnv = nodeEnv; @@ -113,7 +113,9 @@ public class IndicesService extends AbstractLifecycleComponent i this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.mapperRegistry = mapperRegistry; - nodeSettingsService.addListener(indexStoreConfig); + clusterSettingsService.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType); + clusterSettingsService.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle); + } @Override diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 33f3c127d67..e3837fb391c 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -25,9 +25,10 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.ArrayList; import java.util.List; @@ -45,23 +46,15 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final ConcurrentMap breakers = new ConcurrentHashMap(); - // Old pre-1.4.0 backwards compatible settings - public static final String OLD_CIRCUIT_BREAKER_MAX_BYTES_SETTING = "indices.fielddata.breaker.limit"; - public static final String OLD_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.fielddata.breaker.overhead"; + public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.Cluster); - public static final String TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.total.limit"; - public static final String DEFAULT_TOTAL_CIRCUIT_BREAKER_LIMIT = "70%"; - - public static final String FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.fielddata.limit"; - public static final String FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.breaker.fielddata.overhead"; + public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, 
Setting.Scope.Cluster); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.nonNegativeDouble("indices.breaker.fielddata.overhead", 1.03d, true, Setting.Scope.Cluster); public static final String FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.fielddata.type"; - public static final String DEFAULT_FIELDDATA_BREAKER_LIMIT = "60%"; - public static final double DEFAULT_FIELDDATA_OVERHEAD_CONSTANT = 1.03; - public static final String REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.request.limit"; - public static final String REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.breaker.request.overhead"; + public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.Cluster); + public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.nonNegativeDouble("indices.breaker.request.overhead", 1.0d, true, Setting.Scope.Cluster); public static final String REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.request.type"; - public static final String DEFAULT_REQUEST_BREAKER_LIMIT = "40%"; public static final String DEFAULT_BREAKER_TYPE = "memory"; @@ -73,41 +66,21 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final AtomicLong parentTripCount = new AtomicLong(0); @Inject - public HierarchyCircuitBreakerService(Settings settings, NodeSettingsService nodeSettingsService) { + public HierarchyCircuitBreakerService(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - - // This uses the old InternalCircuitBreakerService.CIRCUIT_BREAKER_MAX_BYTES_SETTING - // setting to keep backwards compatibility with 1.3, it can be safely - // removed when compatibility with 1.3 is no longer needed - String compatibilityFielddataLimitDefault = DEFAULT_FIELDDATA_BREAKER_LIMIT; - ByteSizeValue compatibilityFielddataLimit = settings.getAsMemory(OLD_CIRCUIT_BREAKER_MAX_BYTES_SETTING, null); - if (compatibilityFielddataLimit != null) { - compatibilityFielddataLimitDefault = compatibilityFielddataLimit.toString(); - } - - // This uses the old InternalCircuitBreakerService.CIRCUIT_BREAKER_OVERHEAD_SETTING - // setting to keep backwards compatibility with 1.3, it can be safely - // removed when compatibility with 1.3 is no longer needed - double compatibilityFielddataOverheadDefault = DEFAULT_FIELDDATA_OVERHEAD_CONSTANT; - Double compatibilityFielddataOverhead = settings.getAsDouble(OLD_CIRCUIT_BREAKER_OVERHEAD_SETTING, null); - if (compatibilityFielddataOverhead != null) { - compatibilityFielddataOverheadDefault = compatibilityFielddataOverhead; - } - this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, - settings.getAsMemory(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, compatibilityFielddataLimitDefault).bytes(), - settings.getAsDouble(FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, compatibilityFielddataOverheadDefault), + FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), + FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings), CircuitBreaker.Type.parseValue(settings.get(FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE)) ); this.requestSettings = new BreakerSettings(CircuitBreaker.REQUEST, - settings.getAsMemory(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, DEFAULT_REQUEST_BREAKER_LIMIT).bytes(), - settings.getAsDouble(REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0), + REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), + 
REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings), CircuitBreaker.Type.parseValue(settings.get(REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE)) ); - this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, - settings.getAsMemory(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, DEFAULT_TOTAL_CIRCUIT_BREAKER_LIMIT).bytes(), 1.0, CircuitBreaker.Type.PARENT); + this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), 1.0, CircuitBreaker.Type.PARENT); if (logger.isTraceEnabled()) { logger.trace("parent circuit breaker with settings {}", this.parentSettings); } @@ -115,52 +88,41 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { registerBreaker(this.requestSettings); registerBreaker(this.fielddataSettings); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettingsService.addSettingsUpdateConsumer(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, this::setTotalCircuitBreakerLimit, this::validateTotalCircuitBreakerLimit); + clusterSettingsService.addSettingsUpdateConsumer(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setFieldDataBreakerLimit); + clusterSettingsService.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setRequestBreakerLimit); + } + private void setRequestBreakerLimit(ByteSizeValue newRequestMax, Double newRequestOverhead) { + long newRequestLimitBytes = newRequestMax == null ? HierarchyCircuitBreakerService.this.requestSettings.getLimit() : newRequestMax.bytes(); + newRequestOverhead = newRequestOverhead == null ? HierarchyCircuitBreakerService.this.requestSettings.getOverhead() : newRequestOverhead; + + BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestLimitBytes, newRequestOverhead, + HierarchyCircuitBreakerService.this.requestSettings.getType()); + registerBreaker(newRequestSettings); + HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings; + logger.info("Updated breaker settings request: {}", newRequestSettings); } - public class ApplySettings implements NodeSettingsService.Listener { + private void setFieldDataBreakerLimit(ByteSizeValue newFielddataMax, Double newFielddataOverhead) { + long newFielddataLimitBytes = newFielddataMax == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getLimit() : newFielddataMax.bytes(); + newFielddataOverhead = newFielddataOverhead == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getOverhead() : newFielddataOverhead; + BreakerSettings newFielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, newFielddataLimitBytes, newFielddataOverhead, + HierarchyCircuitBreakerService.this.fielddataSettings.getType()); + registerBreaker(newFielddataSettings); + HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings; + logger.info("Updated breaker settings field data: {}", newFielddataSettings); - @Override - public void onRefreshSettings(Settings settings) { + } - // Fielddata settings - ByteSizeValue newFielddataMax = settings.getAsMemory(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, null); - Double newFielddataOverhead = settings.getAsDouble(FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, null); - if (newFielddataMax != null || newFielddataOverhead != null) { - long newFielddataLimitBytes = newFielddataMax == null ? 
HierarchyCircuitBreakerService.this.fielddataSettings.getLimit() : newFielddataMax.bytes(); - newFielddataOverhead = newFielddataOverhead == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getOverhead() : newFielddataOverhead; + private boolean validateTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) { + BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.bytes(), 1.0, CircuitBreaker.Type.PARENT); + validateSettings(new BreakerSettings[]{newParentSettings}); + return true; + } - BreakerSettings newFielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, newFielddataLimitBytes, newFielddataOverhead, - HierarchyCircuitBreakerService.this.fielddataSettings.getType()); - registerBreaker(newFielddataSettings); - HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings; - logger.info("Updated breaker settings fielddata: {}", newFielddataSettings); - } - - // Request settings - ByteSizeValue newRequestMax = settings.getAsMemory(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, null); - Double newRequestOverhead = settings.getAsDouble(REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, null); - if (newRequestMax != null || newRequestOverhead != null) { - long newRequestLimitBytes = newRequestMax == null ? HierarchyCircuitBreakerService.this.requestSettings.getLimit() : newRequestMax.bytes(); - newRequestOverhead = newRequestOverhead == null ? HierarchyCircuitBreakerService.this.requestSettings.getOverhead() : newRequestOverhead; - - BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestLimitBytes, newRequestOverhead, - HierarchyCircuitBreakerService.this.requestSettings.getType()); - registerBreaker(newRequestSettings); - HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings; - logger.info("Updated breaker settings request: {}", newRequestSettings); - } - - // Parent settings - long oldParentMax = HierarchyCircuitBreakerService.this.parentSettings.getLimit(); - ByteSizeValue newParentMax = settings.getAsMemory(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, null); - if (newParentMax != null && (newParentMax.bytes() != oldParentMax)) { - BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, newParentMax.bytes(), 1.0, CircuitBreaker.Type.PARENT); - validateSettings(new BreakerSettings[]{newParentSettings}); - HierarchyCircuitBreakerService.this.parentSettings = newParentSettings; - logger.info("Updated breaker settings parent: {}", newParentSettings); - } - } + private void setTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) { + BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.bytes(), 1.0, CircuitBreaker.Type.PARENT); + this.parentSettings = newParentSettings; } /** diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 749ba4f3360..eb1c643038f 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -23,16 +23,16 @@ import org.apache.lucene.store.RateLimiter; import org.apache.lucene.store.RateLimiter.SimpleRateLimiter; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
-import org.elasticsearch.node.settings.NodeSettingsService;
+import org.elasticsearch.common.settings.ClusterSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import java.io.Closeable;
-import java.util.Objects;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
@@ -40,38 +40,37 @@
 import java.util.concurrent.TimeUnit;
 */
 public class RecoverySettings extends AbstractComponent implements Closeable {
-    public static final String INDICES_RECOVERY_FILE_CHUNK_SIZE = "indices.recovery.file_chunk_size";
-    public static final String INDICES_RECOVERY_TRANSLOG_OPS = "indices.recovery.translog_ops";
-    public static final String INDICES_RECOVERY_TRANSLOG_SIZE = "indices.recovery.translog_size";
-    public static final String INDICES_RECOVERY_COMPRESS = "indices.recovery.compress";
-    public static final String INDICES_RECOVERY_CONCURRENT_STREAMS = "indices.recovery.concurrent_streams";
-    public static final String INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS = "indices.recovery.concurrent_small_file_streams";
-    public static final String INDICES_RECOVERY_MAX_BYTES_PER_SEC = "indices.recovery.max_bytes_per_sec";
+    public static final Setting<ByteSizeValue> INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.file_chunk_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.Cluster);
+    public static final Setting<Integer> INDICES_RECOVERY_TRANSLOG_OPS_SETTING = Setting.intSetting("indices.recovery.translog_ops", 1000, true, Setting.Scope.Cluster);
+    public static final Setting<ByteSizeValue> INDICES_RECOVERY_TRANSLOG_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.translog_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.Cluster);
+    public static final Setting<Boolean> INDICES_RECOVERY_COMPRESS_SETTING = Setting.boolSetting("indices.recovery.compress", true, true, Setting.Scope.Cluster);
+    public static final Setting<Integer> INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_streams", 3, true, Setting.Scope.Cluster);
+    public static final Setting<Integer> INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_small_file_streams", 2, true, Setting.Scope.Cluster);
+    public static final Setting<ByteSizeValue> INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.Cluster);
     /**
      * how long to wait before retrying after issues cause by cluster state syncing between nodes
      * i.e., local node is not yet known on remote node, remote shard not yet started etc.
      */
-    public static final String INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC = "indices.recovery.retry_delay_state_sync";
+    public static final Setting<TimeValue> INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, Setting.Scope.Cluster);
     /** how long to wait before retrying after network related issues */
-    public static final String INDICES_RECOVERY_RETRY_DELAY_NETWORK = "indices.recovery.retry_delay_network";
-
-    /**
-     * recoveries that don't show any activity for more then this interval will be failed.
-     * defaults to `indices.recovery.internal_action_long_timeout`
-     */
-    public static final String INDICES_RECOVERY_ACTIVITY_TIMEOUT = "indices.recovery.recovery_activity_timeout";
+    public static final Setting<TimeValue> INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, Setting.Scope.Cluster);
     /** timeout value to use for requests made as part of the recovery process */
-    public static final String INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT = "indices.recovery.internal_action_timeout";
+    public static final Setting<TimeValue> INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, Setting.Scope.Cluster);
     /**
      * timeout value to use for requests made as part of the recovery process that are expected to take long time.
      * defaults to twice `indices.recovery.internal_action_timeout`.
      */
-    public static final String INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT = "indices.recovery.internal_action_long_timeout";
+    public static final Setting<TimeValue> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), TimeValue.timeValueSeconds(0), true, Setting.Scope.Cluster);
+    /**
+     * recoveries that don't show any activity for more than this interval will be failed.
+     * defaults to `indices.recovery.internal_action_long_timeout`
+     */
+    public static final Setting<TimeValue> INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s), TimeValue.timeValueSeconds(0), true, Setting.Scope.Cluster);
     public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes();
@@ -96,36 +95,32 @@
     @Inject
-    public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsService) {
+    public RecoverySettings(Settings settings, ClusterSettingsService clusterSettingsService) {
         super(settings);
+        this.fileChunkSize = INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.get(settings);
+        this.translogOps = INDICES_RECOVERY_TRANSLOG_OPS_SETTING.get(settings);
+        this.translogSize = INDICES_RECOVERY_TRANSLOG_SIZE_SETTING.get(settings);
+        this.compress = INDICES_RECOVERY_COMPRESS_SETTING.get(settings);
-        this.fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB));
-        this.translogOps = settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, 1000);
-        this.translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB));
-        this.compress = settings.getAsBoolean(INDICES_RECOVERY_COMPRESS, true);
-
-        this.retryDelayStateSync = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(500));
+        this.retryDelayStateSync = INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.get(settings);
         // doesn't have to be fast as nodes are reconnected every 10s by default (see InternalClusterService.ReconnectToNodes)
         // and we want to give the master time to remove a faulty node
-        this.retryDelayNetwork = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_NETWORK, TimeValue.timeValueSeconds(5));
+        this.retryDelayNetwork =
INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.get(settings); - this.internalActionTimeout = settings.getAsTime(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, TimeValue.timeValueMinutes(15)); - this.internalActionLongTimeout = settings.getAsTime(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, new TimeValue(internalActionTimeout.millis() * 2)); + this.internalActionTimeout = INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(settings); + this.internalActionLongTimeout = INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.get(settings); - this.activityTimeout = settings.getAsTime(INDICES_RECOVERY_ACTIVITY_TIMEOUT, - // default to the internalActionLongTimeout used as timeouts on RecoverySource - internalActionLongTimeout - ); + this.activityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings); - this.concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, 3); + this.concurrentStreams = INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.get(settings); this.concurrentStreamPool = EsExecutors.newScaling("recovery_stream", 0, concurrentStreams, 60, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[recovery_stream]")); - this.concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 2); + this.concurrentSmallFileStreams = INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.get(settings); this.concurrentSmallFileStreamPool = EsExecutors.newScaling("small_file_recovery_stream", 0, concurrentSmallFileStreams, 60, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[small_file_recovery_stream]")); - this.maxBytesPerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(40, ByteSizeUnit.MB)); + this.maxBytesPerSec = INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.get(settings); if (maxBytesPerSec.bytes() <= 0) { rateLimiter = null; } else { @@ -135,7 +130,18 @@ public class RecoverySettings extends AbstractComponent implements Closeable { logger.debug("using max_bytes_per_sec[{}], concurrent_streams [{}], file_chunk_size [{}], translog_size [{}], translog_ops [{}], and compress [{}]", maxBytesPerSec, concurrentStreams, fileChunkSize, translogSize, translogOps, compress); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, this::setFileChunkSize); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_OPS_SETTING, this::setTranslogOps); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, this::setTranslogSize); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_COMPRESS_SETTING, this::setCompress); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, this::setConcurrentStreams); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, this::setConcurrentSmallFileStreams); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, this::setRetryDelayStateSync); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, this::setRetryDelayNetwork); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, this::setInternalActionTimeout); + 
clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, this::setInternalActionLongTimeout); + clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setActivityTimeout); } @Override @@ -196,75 +202,60 @@ public class RecoverySettings extends AbstractComponent implements Closeable { return internalActionLongTimeout; } + private void setFileChunkSize(ByteSizeValue fileChunkSize) { + this.fileChunkSize = fileChunkSize; + } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec); - if (!Objects.equals(maxSizePerSec, RecoverySettings.this.maxBytesPerSec)) { - logger.info("updating [{}] from [{}] to [{}]", INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec, maxSizePerSec); - RecoverySettings.this.maxBytesPerSec = maxSizePerSec; - if (maxSizePerSec.bytes() <= 0) { - rateLimiter = null; - } else if (rateLimiter != null) { - rateLimiter.setMBPerSec(maxSizePerSec.mbFrac()); - } else { - rateLimiter = new SimpleRateLimiter(maxSizePerSec.mbFrac()); - } - } + private void setCompress(boolean compress) { + this.compress = compress; + } - ByteSizeValue fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, RecoverySettings.this.fileChunkSize); - if (!fileChunkSize.equals(RecoverySettings.this.fileChunkSize)) { - logger.info("updating [indices.recovery.file_chunk_size] from [{}] to [{}]", RecoverySettings.this.fileChunkSize, fileChunkSize); - RecoverySettings.this.fileChunkSize = fileChunkSize; - } + private void setTranslogOps(int translogOps) { + this.translogOps = translogOps; + } - int translogOps = settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, RecoverySettings.this.translogOps); - if (translogOps != RecoverySettings.this.translogOps) { - logger.info("updating [indices.recovery.translog_ops] from [{}] to [{}]", RecoverySettings.this.translogOps, translogOps); - RecoverySettings.this.translogOps = translogOps; - } + private void setTranslogSize(ByteSizeValue translogSize) { + this.translogSize = translogSize; + } - ByteSizeValue translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, RecoverySettings.this.translogSize); - if (!translogSize.equals(RecoverySettings.this.translogSize)) { - logger.info("updating [indices.recovery.translog_size] from [{}] to [{}]", RecoverySettings.this.translogSize, translogSize); - RecoverySettings.this.translogSize = translogSize; - } + private void setConcurrentStreams(int concurrentStreams) { + this.concurrentStreams = concurrentStreams; + concurrentStreamPool.setMaximumPoolSize(concurrentStreams); + } - boolean compress = settings.getAsBoolean(INDICES_RECOVERY_COMPRESS, RecoverySettings.this.compress); - if (compress != RecoverySettings.this.compress) { - logger.info("updating [indices.recovery.compress] from [{}] to [{}]", RecoverySettings.this.compress, compress); - RecoverySettings.this.compress = compress; - } + public void setRetryDelayStateSync(TimeValue retryDelayStateSync) { + this.retryDelayStateSync = retryDelayStateSync; + } - int concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, RecoverySettings.this.concurrentStreams); - if (concurrentStreams != RecoverySettings.this.concurrentStreams) { - logger.info("updating [indices.recovery.concurrent_streams] from [{}] to [{}]", 
RecoverySettings.this.concurrentStreams, concurrentStreams); - RecoverySettings.this.concurrentStreams = concurrentStreams; - RecoverySettings.this.concurrentStreamPool.setMaximumPoolSize(concurrentStreams); - } + public void setRetryDelayNetwork(TimeValue retryDelayNetwork) { + this.retryDelayNetwork = retryDelayNetwork; + } - int concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RecoverySettings.this.concurrentSmallFileStreams); - if (concurrentSmallFileStreams != RecoverySettings.this.concurrentSmallFileStreams) { - logger.info("updating [indices.recovery.concurrent_small_file_streams] from [{}] to [{}]", RecoverySettings.this.concurrentSmallFileStreams, concurrentSmallFileStreams); - RecoverySettings.this.concurrentSmallFileStreams = concurrentSmallFileStreams; - RecoverySettings.this.concurrentSmallFileStreamPool.setMaximumPoolSize(concurrentSmallFileStreams); - } + public void setActivityTimeout(TimeValue activityTimeout) { + this.activityTimeout = activityTimeout; + } - RecoverySettings.this.retryDelayNetwork = maybeUpdate(RecoverySettings.this.retryDelayNetwork, settings, INDICES_RECOVERY_RETRY_DELAY_NETWORK); - RecoverySettings.this.retryDelayStateSync = maybeUpdate(RecoverySettings.this.retryDelayStateSync, settings, INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC); - RecoverySettings.this.activityTimeout = maybeUpdate(RecoverySettings.this.activityTimeout, settings, INDICES_RECOVERY_ACTIVITY_TIMEOUT); - RecoverySettings.this.internalActionTimeout = maybeUpdate(RecoverySettings.this.internalActionTimeout, settings, INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT); - RecoverySettings.this.internalActionLongTimeout = maybeUpdate(RecoverySettings.this.internalActionLongTimeout, settings, INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT); - } + public void setInternalActionTimeout(TimeValue internalActionTimeout) { + this.internalActionTimeout = internalActionTimeout; + } - private TimeValue maybeUpdate(final TimeValue currentValue, final Settings settings, final String key) { - final TimeValue value = settings.getAsTime(key, currentValue); - if (value.equals(currentValue)) { - return currentValue; - } - logger.info("updating [] from [{}] to [{}]", key, currentValue, value); - return value; + public void setInternalActionLongTimeout(TimeValue internalActionLongTimeout) { + this.internalActionLongTimeout = internalActionLongTimeout; + } + + private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) { + this.maxBytesPerSec = maxBytesPerSec; + if (maxBytesPerSec.bytes() <= 0) { + rateLimiter = null; + } else if (rateLimiter != null) { + rateLimiter.setMBPerSec(maxBytesPerSec.mbFrac()); + } else { + rateLimiter = new SimpleRateLimiter(maxBytesPerSec.mbFrac()); } } + + private void setConcurrentSmallFileStreams(int concurrentSmallFileStreams) { + this.concurrentSmallFileStreams = concurrentSmallFileStreams; + concurrentSmallFileStreamPool.setMaximumPoolSize(concurrentSmallFileStreams); + } } diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index f095cc355ef..47d18105d0f 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -49,7 +50,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.io.IOException; import java.util.ArrayList; @@ -66,7 +67,7 @@ import java.util.concurrent.locks.ReentrantLock; */ public class IndicesTTLService extends AbstractLifecycleComponent { - public static final String INDICES_TTL_INTERVAL = "indices.ttl.interval"; + public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster); public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge"; private final ClusterService clusterService; @@ -77,16 +78,15 @@ public class IndicesTTLService extends AbstractLifecycleComponent pageCacheRecyclerImpl = PageCacheRecycler.class; Class bigArraysImpl = BigArrays.class; - public NodeModule(Node node, NodeSettingsService nodeSettingsService, MonitorService monitorService) { + public NodeModule(Node node, MonitorService monitorService) { this.node = node; - this.nodeSettingsService = nodeSettingsService; this.monitorService = monitorService; } @@ -60,7 +57,6 @@ public class NodeModule extends AbstractModule { } bind(Node.class).toInstance(node); - bind(NodeSettingsService.class).toInstance(nodeSettingsService); bind(MonitorService.class).toInstance(monitorService); bind(NodeService.class).asEagerSingleton(); } diff --git a/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java b/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java deleted file mode 100644 index dbe6a33172b..00000000000 --- a/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.node.settings; - -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.common.settings.Settings; - -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -/** - * A service that allows to register for node settings change that can come from cluster - * events holding new settings. - */ -public class NodeSettingsService extends AbstractComponent implements ClusterStateListener { - - private static volatile Settings globalSettings = Settings.Builder.EMPTY_SETTINGS; - - /** - * Returns the global (static) settings last updated by a node. Note, if you have multiple - * nodes on the same JVM, it will just return the latest one set... - */ - public static Settings getGlobalSettings() { - return globalSettings; - } - - private volatile Settings lastSettingsApplied; - - private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); - - @Inject - public NodeSettingsService(Settings settings) { - super(settings); - globalSettings = settings; - } - - // inject it as a member, so we won't get into possible cyclic problems - public void setClusterService(ClusterService clusterService) { - clusterService.add(this); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency - if (event.state().blocks().disableStatePersistence()) { - return; - } - - if (!event.metaDataChanged()) { - // nothing changed in the metadata, no need to check - return; - } - - if (lastSettingsApplied != null && event.state().metaData().settings().equals(lastSettingsApplied)) { - // nothing changed in the settings, ignore - return; - } - - for (Listener listener : listeners) { - try { - listener.onRefreshSettings(event.state().metaData().settings()); - } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, listener); - } - } - - try { - for (Map.Entry entry : event.state().metaData().settings().getAsMap().entrySet()) { - if (entry.getKey().startsWith("logger.")) { - String component = entry.getKey().substring("logger.".length()); - if ("_root".equals(component)) { - ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); - } else { - ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); - } - } - } - } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, "logger"); - } - - lastSettingsApplied = event.state().metaData().settings(); - globalSettings = lastSettingsApplied; - } - - /** - * Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. 
- */ - public void addListener(Listener listener) { - this.listeners.add(listener); - } - - public void removeListener(Listener listener) { - this.listeners.remove(listener); - } - - public interface Listener { - void onRefreshSettings(Settings settings); - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java index 005b30e6207..bd7e62abf48 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java @@ -88,6 +88,6 @@ public class RestUpdateSettingsAction extends BaseRestHandler { } updateSettingsRequest.settings(updateSettings); - client.admin().indices().updateSettings(updateSettingsRequest, new AcknowledgedRestListener(channel)); + client.admin().indices().updateSettings(updateSettingsRequest, new AcknowledgedRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 9501099997f..bfca6bb322f 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -70,7 +71,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer.TerminationHandle; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -109,9 +110,10 @@ public class SearchService extends AbstractLifecycleComponent imp public static final String NORMS_LOADING_KEY = "index.norms.loading"; public static final String DEFAULT_KEEPALIVE_KEY = "search.default_keep_alive"; public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval"; - public static final String DEFAULT_SEARCH_TIMEOUT = "search.default_search_timeout"; public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); + public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.Cluster); + private final ThreadPool threadPool; @@ -150,7 +152,7 @@ public class SearchService extends AbstractLifecycleComponent imp private final ParseFieldMatcher parseFieldMatcher; @Inject - public SearchService(Settings settings, NodeSettingsService nodeSettingsService, ClusterService clusterService, IndicesService indicesService,IndicesWarmer indicesWarmer, ThreadPool threadPool, + public SearchService(Settings settings, ClusterSettingsService clusterSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool 
threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { super(settings); @@ -184,19 +186,12 @@ public class SearchService extends AbstractLifecycleComponent imp this.indicesWarmer.addListener(new FieldDataWarmer(indicesWarmer)); this.indicesWarmer.addListener(new SearchWarmer()); - defaultSearchTimeout = settings.getAsTime(DEFAULT_SEARCH_TIMEOUT, NO_TIMEOUT); - nodeSettingsService.addListener(new SearchSettingsListener()); + defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings); + clusterSettingsService.addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); } - class SearchSettingsListener implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - final TimeValue maybeNewDefaultSearchTimeout = settings.getAsTime(SearchService.DEFAULT_SEARCH_TIMEOUT, SearchService.this.defaultSearchTimeout); - if (!maybeNewDefaultSearchTimeout.equals(SearchService.this.defaultSearchTimeout)) { - logger.info("updating [{}] from [{}] to [{}]", SearchService.DEFAULT_SEARCH_TIMEOUT, SearchService.this.defaultSearchTimeout, maybeNewDefaultSearchTimeout); - SearchService.this.defaultSearchTimeout = maybeNewDefaultSearchTimeout; - } - } + private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) { + this.defaultSearchTimeout = defaultSearchTimeout; } @Override diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index cd710d52cdc..c4049573b9b 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -33,8 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -50,6 +49,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRepository; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; @@ -118,18 +118,19 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis private final MetaDataCreateIndexService createIndexService; - private final DynamicSettings dynamicSettings; + private final ClusterSettings dynamicSettings; private final MetaDataIndexUpgradeService metaDataIndexUpgradeService; private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); private final BlockingQueue updatedSnapshotStateQueue = ConcurrentCollections.newBlockingQueue(); + private final ClusterSettingsService clusterSettingsService; @Inject public RestoreService(Settings settings, ClusterService clusterService, 
RepositoriesService repositoriesService, TransportService transportService, - AllocationService allocationService, MetaDataCreateIndexService createIndexService, @ClusterDynamicSettings DynamicSettings dynamicSettings, - MetaDataIndexUpgradeService metaDataIndexUpgradeService) { + AllocationService allocationService, MetaDataCreateIndexService createIndexService, ClusterSettings dynamicSettings, + MetaDataIndexUpgradeService metaDataIndexUpgradeService, ClusterSettingsService clusterSettingsService) { super(settings); this.clusterService = clusterService; this.repositoriesService = repositoriesService; @@ -140,6 +141,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis this.metaDataIndexUpgradeService = metaDataIndexUpgradeService; transportService.registerRequestHandler(UPDATE_RESTORE_ACTION_NAME, UpdateIndexShardRestoreStatusRequest::new, ThreadPool.Names.SAME, new UpdateRestoreStateRequestHandler()); clusterService.add(this); + this.clusterSettingsService = clusterSettingsService; } /** @@ -389,24 +391,9 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis private void restoreGlobalStateIfRequested(MetaData.Builder mdBuilder) { if (request.includeGlobalState()) { if (metaData.persistentSettings() != null) { - boolean changed = false; - Settings.Builder persistentSettings = Settings.settingsBuilder().put(); - for (Map.Entry entry : metaData.persistentSettings().getAsMap().entrySet()) { - if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) { - String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state()); - if (error == null) { - persistentSettings.put(entry.getKey(), entry.getValue()); - changed = true; - } else { - logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error); - } - } else { - logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey()); - } - } - if (changed) { - mdBuilder.persistentSettings(persistentSettings.build()); - } + Settings settings = metaData.persistentSettings(); + clusterSettingsService.dryRun(settings); + mdBuilder.persistentSettings(settings); } if (metaData.templates() != null) { // TODO: Should all existing templates be deleted first? 
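Every file converted so far follows the same pattern: the component declares a typed Setting constant, seeds its field from the node-level Settings, and registers a per-setting consumer (optionally with a validator) on ClusterSettingsService instead of subscribing a NodeSettingsService.Listener and diffing the whole Settings object itself. The sketch below illustrates that pattern, assuming only the Setting.intSetting(...) and addSettingsUpdateConsumer(...) signatures visible in the hunks above; FooService and the foo.max_workers key are invented for illustration and are not part of this patch.

import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettingsService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class FooService extends AbstractComponent {

    // hypothetical dynamic, cluster-scoped setting, modeled on the declarations above
    public static final Setting<Integer> FOO_MAX_WORKERS_SETTING =
            Setting.intSetting("foo.max_workers", 4, true, Setting.Scope.Cluster);

    private volatile int maxWorkers;

    public FooService(Settings settings, ClusterSettingsService clusterSettingsService) {
        super(settings);
        // seed from the node settings, falling back to the declared default
        this.maxWorkers = FOO_MAX_WORKERS_SETTING.get(settings);
        // the consumer only runs after the whole update batch has been validated, so an
        // update either applies to all registered settings or is rejected as a whole
        clusterSettingsService.addSettingsUpdateConsumer(FOO_MAX_WORKERS_SETTING, this::setMaxWorkers,
                (value) -> value > 0); // boolean validator, mirroring the ones registered above
    }

    private void setMaxWorkers(int maxWorkers) {
        this.maxWorkers = maxWorkers;
    }
}

Compared to the removed ApplySettings listeners, the old/new value comparison and per-key parsing no longer need to be repeated in every component; the typed Setting carries the key, default value, and parser in one place.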
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index b0d81279b03..f0fc5d86b00 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -20,13 +20,12 @@ package org.elasticsearch.threadpool; import org.apache.lucene.util.Counter; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.SizeValue; @@ -38,14 +37,12 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import java.io.IOException; import java.util.*; import java.util.concurrent.*; -import java.util.function.Function; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -172,7 +169,7 @@ public class ThreadPool extends AbstractComponent { } } - public static final String THREADPOOL_GROUP = "threadpool."; + public static final Setting THREADPOOL_GROUP_SETTING = Setting.groupSetting("threadpool.", true, Setting.Scope.Cluster); private volatile Map executors; @@ -184,7 +181,7 @@ public class ThreadPool extends AbstractComponent { private final EstimatedTimeThread estimatedTimeThread; - private boolean settingsListenerIsSet = false; + private final AtomicBoolean settingsListenerIsSet = new AtomicBoolean(false); static final Executor DIRECT_EXECUTOR = command -> command.run(); @@ -197,7 +194,8 @@ public class ThreadPool extends AbstractComponent { assert settings.get("name") != null : "ThreadPool's settings should contain a name"; - Map groupSettings = getThreadPoolSettingsGroup(settings); + Map groupSettings = THREADPOOL_GROUP_SETTING.get(settings).getAsGroups(); + validate(groupSettings); int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings); int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5); @@ -252,18 +250,12 @@ public class ThreadPool extends AbstractComponent { this.estimatedTimeThread.start(); } - private Map getThreadPoolSettingsGroup(Settings settings) { - Map groupSettings = settings.getGroups(THREADPOOL_GROUP); - validate(groupSettings); - return groupSettings; - } - - public void setNodeSettingsService(NodeSettingsService nodeSettingsService) { - if(settingsListenerIsSet) { + public void setNodeSettingsService(ClusterSettingsService clusterSettingsService) { + if(settingsListenerIsSet.compareAndSet(false, true)) { + clusterSettingsService.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> {validate(s.getAsGroups()); return true;}); + } else { throw new 
IllegalStateException("the node settings listener was set more then once"); } - nodeSettingsService.addListener(new ApplySettings()); - settingsListenerIsSet = true; } public long estimatedTimeInMillis() { @@ -526,8 +518,8 @@ public class ThreadPool extends AbstractComponent { throw new IllegalArgumentException("No type found [" + type + "], for [" + name + "]"); } - public void updateSettings(Settings settings) { - Map groupSettings = getThreadPoolSettingsGroup(settings); + private void updateSettings(Settings settings) { + Map groupSettings = settings.getAsGroups(); if (groupSettings.isEmpty()) { return; } @@ -583,7 +575,7 @@ public class ThreadPool extends AbstractComponent { ThreadPoolType correctThreadPoolType = THREAD_POOL_TYPES.get(key); // TODO: the type equality check can be removed after #3760/#6732 are addressed if (type != null && !correctThreadPoolType.getType().equals(type)) { - throw new IllegalArgumentException("setting " + THREADPOOL_GROUP + key + ".type to " + type + " is not permitted; must be " + correctThreadPoolType.getType()); + throw new IllegalArgumentException("setting " + THREADPOOL_GROUP_SETTING.getKey() + key + ".type to " + type + " is not permitted; must be " + correctThreadPoolType.getType()); } } } @@ -866,13 +858,6 @@ public class ThreadPool extends AbstractComponent { } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - updateSettings(settings); - } - } - /** * Returns true if the given service was terminated successfully. If the termination timed out, * the service is null this method will return false. @@ -911,38 +896,4 @@ public class ThreadPool extends AbstractComponent { } return false; } - - public static ThreadPoolTypeSettingsValidator THREAD_POOL_TYPE_SETTINGS_VALIDATOR = new ThreadPoolTypeSettingsValidator(); - private static class ThreadPoolTypeSettingsValidator implements Validator { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - // TODO: the type equality validation can be removed after #3760/#6732 are addressed - Matcher matcher = Pattern.compile("threadpool\\.(.*)\\.type").matcher(setting); - if (!matcher.matches()) { - return null; - } else { - String threadPool = matcher.group(1); - ThreadPool.ThreadPoolType defaultThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPool); - ThreadPool.ThreadPoolType threadPoolType; - try { - threadPoolType = ThreadPool.ThreadPoolType.fromType(value); - } catch (IllegalArgumentException e) { - return e.getMessage(); - } - if (defaultThreadPoolType.equals(threadPoolType)) { - return null; - } else { - return String.format( - Locale.ROOT, - "thread pool type for [%s] can only be updated to [%s] but was [%s]", - threadPool, - defaultThreadPoolType.getType(), - threadPoolType.getType() - ); - } - } - - } - } - } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 14fc9029b00..f9cbb012b2a 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -37,16 +38,14 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; @@ -88,14 +87,13 @@ public class TransportService extends AbstractLifecycleComponent TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "_na_", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); + public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "_na_", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster);; private final ESLogger tracerLog; volatile String[] tracerLogInclude; volatile String[] tracelLogExclude; - private final ApplySettings settingsListener = new ApplySettings(); /** if set will call requests sent to this id to shortcut and executed locally */ volatile DiscoveryNode localNode = null; @@ -109,8 +107,8 @@ public class TransportService extends AbstractLifecycleComponent Nodes upgrade complete"); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 825e3e40894..6946e35861c 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -45,8 +45,8 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) .build(); internalCluster().startMasterOnlyNodesAsync(3, sharedSettings).get(); diff --git a/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java index b857e4d59ee..e1265d577e9 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java @@ -62,7 +62,7 @@ public class ReplicaRecoveryBenchmark { 
BootstrapForTesting.ensureInitialized(); Settings settings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, "false") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), "false") .put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0) .put(TransportModule.TRANSPORT_TYPE_KEY, "local") diff --git a/core/src/test/java/org/elasticsearch/benchmark/transport/BenchmarkNettyLargeMessages.java b/core/src/test/java/org/elasticsearch/benchmark/transport/BenchmarkNettyLargeMessages.java index 553ef0c6ac6..14447fb715f 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/transport/BenchmarkNettyLargeMessages.java +++ b/core/src/test/java/org/elasticsearch/benchmark/transport/BenchmarkNettyLargeMessages.java @@ -22,7 +22,6 @@ package org.elasticsearch.benchmark.transport; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -30,7 +29,6 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.elasticsearch.transport.netty.NettyTransport; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 137c6c5b2c2..b5e36c08f74 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -120,7 +120,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { public Settings nodeSettings(int ord) { return Settings.builder() .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 30) // increase recovery speed for small files + .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), 30) // increase recovery speed for small files .build(); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index bccd4290d8e..b536c88ae4e 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -181,7 +181,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { logger.info("--> check settings"); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); - assertThat(clusterState.metaData().persistentSettings().get(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP + "version_attr"), equalTo(version)); + assertThat(clusterState.metaData().persistentSettings().get(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "version_attr"), equalTo(version)); logger.info("--> check 
templates"); IndexTemplateMetaData template = clusterState.getMetaData().templates().get("template_" + version.toLowerCase(Locale.ROOT)); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 5ed45620a03..8aa065548df 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.Store; @@ -126,7 +127,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() // manual collection or upon cluster forming. - .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, "1s") + .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), "1s") .build(); } @@ -137,9 +138,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { } public void testClusterInfoServiceCollectsInformation() throws Exception { - internalCluster().startNodesAsync(2, - Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "200ms").build()) - .get(); + internalCluster().startNodesAsync(2).get(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE).build())); @@ -147,6 +146,8 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { InternalTestCluster internalTestCluster = internalCluster(); // Get the cluster info service on the master node final InternalClusterInfoService infoService = (InternalClusterInfoService) internalTestCluster.getInstance(ClusterInfoService.class, internalTestCluster.getMasterName()); + infoService.setUpdateFrequency(TimeValue.timeValueMillis(200)); + infoService.onMaster(); ClusterInfo info = infoService.refresh(); assertNotNull("info should not be null", info); ImmutableOpenMap leastUsages = info.getNodeLeastAvailableDiskUsages(); @@ -188,7 +189,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public void testClusterInfoServiceInformationClearOnError() throws InterruptedException, ExecutionException { internalCluster().startNodesAsync(2, // manually control publishing - Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "60m").build()) + Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), "60m").build()) .get(); prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).get(); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 4ca0fffbdfc..69fb6cb1a61 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -31,10 +31,12 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import 
org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.settings.ClusterSettingsService; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.settings.IndexDynamicSettings; @@ -75,16 +77,16 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSettingDuplicate() { ClusterModule module = new ClusterModule(Settings.EMPTY); try { - module.registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); + module.registerSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING); } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE + "] twice"); + assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice"); } } public void testRegisterClusterDynamicSetting() { ClusterModule module = new ClusterModule(Settings.EMPTY); - module.registerClusterDynamicSetting("foo.bar", Validator.EMPTY); - assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), ClusterDynamicSettings.class); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster)); + assertInstanceBinding(module, ClusterSettingsService.class, service -> service.getClusterSettings().hasDynamicSetting("foo.bar")); } public void testRegisterIndexDynamicSettingDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 9e842a38722..5199d3fc2ef 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -940,7 +940,7 @@ public class ClusterServiceIT extends ESIntegTestCase { public void testLongClusterStateUpdateLogging() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "local") - .put(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, "10s") + .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10s") .build(); internalCluster().startNode(settings); ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class); @@ -976,7 +976,7 @@ public class ClusterServiceIT extends ESIntegTestCase { processedFirstTask.await(1, TimeUnit.SECONDS); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, "10ms"))); + .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10ms"))); clusterService1.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { @Override diff --git 
a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 648356be173..2f1e5d33f7e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -280,7 +280,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { assertNoMasterBlockOnAllNodes(); logger.info("--> bringing another node up"); - internalCluster().startNode(settingsBuilder().put(settings).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2).build()); + internalCluster().startNode(settingsBuilder().put(settings).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2).build()); clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").get(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); } @@ -317,7 +317,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { // set an initial value which is at least quorum to avoid split brains during initial startup int initialMinMasterNodes = randomIntBetween(nodeCount / 2 + 1, nodeCount); - settings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, initialMinMasterNodes); + settings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), initialMinMasterNodes); logger.info("--> starting [{}] nodes. min_master_nodes set to [{}]", nodeCount, initialMinMasterNodes); @@ -328,19 +328,22 @@ int updateCount = randomIntBetween(1, nodeCount); - logger.info("--> updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount); + logger.info("--> updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount))); + .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount))); logger.info("--> verifying no node left and master is up"); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodeCount)).get().isTimedOut()); updateCount = nodeCount + randomIntBetween(1, 2000); - logger.info("--> trying to updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount); - assertThat(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount)) - .get().getPersistentSettings().getAsMap().keySet(), - empty()); + logger.info("--> trying to update [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount); + try { + client().admin().cluster().prepareUpdateSettings() + .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount)).get(); + fail("should have failed to set minimum_master_nodes above the current master node count"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [" + updateCount + "]"); + } logger.info("--> verifying no node left and master is up"); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodeCount)).get().isTimedOut()); @@ -351,8 +354,8 @@ 
public class MinimumMasterNodesIT extends ESIntegTestCase { .put("discovery.type", "zen") .put(FaultDetection.SETTING_PING_TIMEOUT, "1h") // disable it .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) - .put(DiscoverySettings.COMMIT_TIMEOUT, "100ms") // speed things up + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up .build(); internalCluster().startNodesAsync(3, settings).get(); ensureGreen(); // ensure cluster state is recovered before we disrupt things diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index e0f8b2cb840..8e5479d6f84 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -67,7 +67,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.zen.minimum_master_nodes", 2) .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") .put("discovery.initial_state_timeout", "500ms") - .put(DiscoverySettings.NO_MASTER_BLOCK, "all") + .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all") .build(); TimeValue timeout = TimeValue.timeValueMillis(200); @@ -219,7 +219,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.zen.minimum_master_nodes", 2) .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") .put("discovery.initial_state_timeout", "500ms") - .put(DiscoverySettings.NO_MASTER_BLOCK, "write") + .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write") .build(); internalCluster().startNode(settings); diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 81de8b1a43c..c5e48a97dfd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -50,8 +50,8 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { .put(super.nodeSettings(nodeOrdinal)) //make sure that enough concurrent reroutes can happen at the same time //we have a minimum of 2 nodes, and a maximum of 10 shards, thus 5 should be enough - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 10) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 5) + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 10) .build(); } @@ -69,7 +69,7 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { private void removePublishTimeout() { //to test that the acknowledgement mechanism is working we better disable the wait for publish //otherwise the operation is most likely acknowledged even if it doesn't support ack - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0"))); + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0"))); } public void testClusterUpdateSettingsAcknowledgement() { diff --git 
a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java index 47517a753af..7d3825a14b8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java @@ -67,8 +67,8 @@ public class AckIT extends ESIntegTestCase { //to test that the acknowledgement mechanism is working we better disable the wait for publish //otherwise the operation is most likely acknowledged even if it doesn't support ack return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(DiscoverySettings.PUBLISH_TIMEOUT, 0).build(); - } + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), 0).build(); +} public void testUpdateSettingsAcknowledgement() { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index f9151628b8a..726590104f1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -106,9 +106,9 @@ public class AwarenessAllocationIT extends ESIntegTestCase { public void testAwarenessZones() throws Exception { Settings commonSettings = Settings.settingsBuilder() - .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP + "zone.values", "a,b") - .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, "zone") - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 3) + .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a,b") + .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone") + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 3) .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "10s") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 1605e70637e..b85c17097f2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -56,7 +56,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; @@ -71,15 +71,15 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testRerouteWithCommands_disableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - 
.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build(); rerouteWithCommands(commonSettings); } public void testRerouteWithCommands_enableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); rerouteWithCommands(commonSettings); } @@ -147,15 +147,15 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testRerouteWithAllocateLocalGateway_disableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build(); rerouteWithAllocateLocalGateway(commonSettings); } public void testRerouteWithAllocateLocalGateway_enableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); rerouteWithAllocateLocalGateway(commonSettings); } @@ -279,7 +279,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> disable allocation"); Settings newSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); client().admin().cluster().prepareUpdateSettings().setTransientSettings(newSettings).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index d10912e69db..336846f2da9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -51,7 +51,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testAddNodesAndIndices() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); AllocationService service = createAllocationService(settings.build()); ClusterState clusterState = initCluster(service, 1, 3, 3, 1); @@ -94,7 +94,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testMinimalRelocations() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), 
ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) .put("cluster.routing.allocation.node_concurrent_recoveries", 2); AllocationService service = createAllocationService(settings.build()); @@ -162,7 +162,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testMinimalRelocationsNoLimit() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) .put("cluster.routing.allocation.node_concurrent_recoveries", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100); AllocationService service = createAllocationService(settings.build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 6ac2b7df9ca..1cf5ba0083d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -98,8 +98,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testAllocateCommand() { AllocationService allocation = createAllocationService(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build()); logger.info("--> building initial routing table"); @@ -186,8 +186,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testCancelCommand() { AllocationService allocation = createAllocationService(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build()); logger.info("--> building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java index d7a049d1b92..8d510e7f0c5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java @@ -39,8 +39,8 @@ public class AllocationPriorityTests extends ESAllocationTestCase { public void testPrioritizedIndicesAllocatedFirst() { AllocationService allocation = createAllocationService(settingsBuilder(). 
put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 1).build()); + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 1).build()); final String highPriorityName; final String lowPriorityName; final int priorityFirst; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index 7be6037cf79..e9d0f75b1c1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -55,7 +55,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded1() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -123,7 +123,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded2() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -193,7 +193,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.attributes", "rack_id") .put("cluster.routing.allocation.balance.index", 0.0f) @@ -293,7 +293,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -387,7 +387,7 @@ 
public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded5() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -465,7 +465,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded6() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -545,7 +545,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness1() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -612,7 +612,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness2() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -681,7 +681,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") @@ -767,7 +767,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { .put("cluster.routing.allocation.awareness.attributes", "zone") .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") 
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -828,7 +828,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testUnassignedShardsWithUnbalancedZones() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "zone") .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 1092b2ede19..bc4ef8235d4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -37,10 +37,11 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.hamcrest.Matchers; @@ -65,10 +66,10 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { final float balanceTreshold = 1.0f; Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, indexBalance); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, replicaBalance); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, balanceTreshold); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); AllocationService strategy = createAllocationService(settings.build()); @@ -90,10 +91,10 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { final float balanceTreshold = 1.0f; Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, indexBalance); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, replicaBalance); - 
settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, balanceTreshold); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); AllocationService strategy = createAllocationService(settings.build()); @@ -279,36 +280,30 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testPersistedSettings() { Settings.Builder settings = settingsBuilder(); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, 0.2); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, 0.3); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, 2.0); - final NodeSettingsService.Listener[] listeners = new NodeSettingsService.Listener[1]; - NodeSettingsService service = new NodeSettingsService(settingsBuilder().build()) { - - @Override - public void addListener(Listener listener) { - assertNull("addListener was called twice while only one time was expected", listeners[0]); - listeners[0] = listener; - } - - }; + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0); + ClusterSettingsService service = new ClusterSettingsService(settingsBuilder().build(), new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings.build(), service); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f)); assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f)); settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - listeners[0].onRefreshSettings(settings.build()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + service.applySettings(settings.build()); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f)); assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f)); settings = settingsBuilder(); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, 0.5); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, 0.1); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, 3.0); - listeners[0].onRefreshSettings(settings.build()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.1); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 3.0); + service.applySettings(settings.build()); 
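The hunks above show the new update path end to end: the tests construct a ClusterSettingsService over ClusterSettings.BUILT_IN_CLUSTER_SETTINGS and push changes through applySettings, replacing the old NodeSettingsService.Listener#onRefreshSettings callback. A minimal consumer-side sketch of that pattern follows, with a hypothetical component and setting key; floatSetting is assumed by analogy with the boolSetting factory used elsewhere in this patch, and addSettingsUpdateConsumer is an assumed registration hook, not a quoted signature:

    import org.elasticsearch.common.settings.ClusterSettingsService;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class ExampleBalanceConsumer {
        // Hypothetical dynamic, cluster-scoped setting with a default of 0.5f.
        public static final Setting<Float> EXAMPLE_BALANCE_SETTING =
                Setting.floatSetting("example.balance.factor", 0.5f, true, Setting.Scope.Cluster);

        private volatile float balance;

        public ExampleBalanceConsumer(Settings settings, ClusterSettingsService service) {
            this.balance = EXAMPLE_BALANCE_SETTING.get(settings); // typed read, falling back to the default
            // Assumed hook: invoked only after applySettings(...) has validated the whole batch.
            service.addSettingsUpdateConsumer(EXAMPLE_BALANCE_SETTING, value -> this.balance = value);
        }
    }

Under that model, a call like service.applySettings(settingsBuilder().put(EXAMPLE_BALANCE_SETTING.getKey(), 0.7f).build()) updates the consumer, while a batch containing any invalid value is rejected as a whole and leaves it untouched.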
assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.5f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.1f)); assertThat(allocator.getThreshold(), Matchers.equalTo(3.0f)); @@ -317,7 +312,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testNoRebalanceOnPrimaryOverload() { Settings.Builder settings = settingsBuilder(); AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(), - new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), new ShardsAllocators(settings.build(), + new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), new ShardsAllocators(settings.build(), NoopGatewayAllocator.INSTANCE, new ShardsAllocator() { @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index 8dad41db2f8..15a6ea0a5f4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -46,7 +46,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ClusterRebalanceRoutingTests.class); public void testAlways() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() @@ -132,7 +132,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { public void testClusterPrimariesActive1() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -236,7 +236,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterPrimariesActive2() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -320,7 +320,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive1() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = 
createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -443,7 +443,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive2() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -527,7 +527,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive3() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -737,7 +737,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { public void testRebalanceWhileShardFetching() { final AtomicBoolean hasFetches = new AtomicBoolean(true); - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build(), new NoopGatewayAllocator() { @Override public boolean allocateUnassigned(RoutingAllocation allocation) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index e16e7cc2cec..d807dc1b5ca 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -46,7 +46,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testSimpleDeadNodeOnStartedPrimaryShard() { AllocationService allocation = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -97,7 +97,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnToNode() { AllocationService allocation = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -171,7 +171,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnFromNode() { AllocationService allocation = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 3b242d8676f..affab78521c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -41,7 +41,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(FailedNodeRoutingTests.class); public void testSimpleFailedNodeTest() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 9bfaf7e9997..8dffacaa379 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -57,7 +57,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailedShardPrimaryRelocatingToAndFrom() { AllocationService allocation = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -145,7 +145,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailPrimaryStartedCheckReplicaElected() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -226,7 +226,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureSingleNode() { AllocationService strategy = createAllocationService(settingsBuilder() 
.put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -282,7 +282,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testSingleShardMultipleAllocationFailures() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -338,7 +338,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureTwoNodes() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -398,7 +398,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testRebalanceFailure() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java index aa6fdef828a..d5f8134d95f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java @@ -48,7 +48,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -178,7 +178,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); 
logger.info("Building initial routing table"); @@ -340,7 +340,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 62501cbf9fb..c00cd843b1b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -58,7 +58,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testDoNotAllocateFromPrimary() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -172,7 +172,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRandom() { AllocationService service = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -221,7 +221,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRollingRestart() { AllocationService service = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index fbc742573e9..18725a0de78 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -57,7 +57,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, 
"always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), new ClusterInfoService() { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java index eca2a227f8f..eec1b48be97 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java @@ -47,7 +47,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -119,7 +119,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -211,7 +211,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 1) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 3) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java index f096ab0b13d..e1586c433a5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java @@ -41,7 +41,7 @@ public class ShardVersioningTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ShardVersioningTests.class); public void testSimple() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), 
ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index 11d41a6a336..c0f0c0c2252 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -90,7 +90,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { public void testClusterLevelShardsLimitAllocate() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, 1) + .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1) .build()); logger.info("Building initial routing table"); @@ -126,7 +126,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { // Bump the cluster total shards to 2 strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, 2) + .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2) .build()); logger.info("Do another reroute, make sure shards are now allocated"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index ed44b84a886..29ef451324d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -211,7 +211,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexEvenDistribution() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -323,7 +323,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexUnevenNodes() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java index 671cce007c9..aec81a6e063 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java @@ -50,7 +50,7 @@ public class TenShardsOneReplicaRoutingTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.balance.index", 0.0f) .put("cluster.routing.allocation.balance.replica", 1.0f) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index a739f30856a..5377d09d4b5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -60,9 +60,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThreshold() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.8).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "node1", "/dev/null", 100, 10)); // 90% used @@ -96,7 +96,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -170,9 +170,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 70 instead of 80 // node2 now should not have new shards allocated to it, but shards can remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.7).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.7).build(); deciders = new 
AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -181,7 +181,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -201,9 +201,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 60 instead of 70 // node2 now should not have new shards allocated to it, and shards cannot remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.5) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.6).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.5) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.6).build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -212,7 +212,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -254,9 +254,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThresholdWithAbsoluteSizes() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "30b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "9b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "30b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "9b").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 10)); // 90% used @@ -292,7 +292,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -349,7 +349,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { 
}; strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -405,9 +405,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 70 instead of 80 // node2 now should not have new shards allocated to it, but shards can remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "40b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "30b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "40b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "30b").build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -416,7 +416,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -436,9 +436,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 60 instead of 70 // node2 now should not have new shards allocated to it, and shards cannot remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "50b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "40b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "50b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "40b").build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -447,7 +447,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -522,9 +522,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThresholdWithShardSizes() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) 
- .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "71%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "71%").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 31)); // 69% used @@ -556,7 +556,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -589,9 +589,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testUnknownDiskUsage() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.85).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.85).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node2", new DiskUsage("node2", "node2", "/dev/null", 100, 50)); // 50% used @@ -624,7 +624,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -688,10 +688,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testShardRelocationsTakenIntoAccount() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 
0.8).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 40)); // 60% used @@ -727,7 +727,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -794,10 +794,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testCanRemainWithShardRelocatingAway() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%").build(); // We have an index with 2 primary shards each taking 40 bytes. Each node has 100 bytes available ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); @@ -889,7 +889,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ))); AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); // Ensure that the reroute call doesn't alter the routing table, since the first primary is relocating away @@ -906,10 +906,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testForSingleDataNode() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 100)); 
// 0% used @@ -989,7 +989,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); RoutingAllocation.Result result = strategy.reroute(clusterState, "reroute"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index a386883ad1b..279687a004f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -28,12 +28,13 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -45,7 +46,7 @@ import static org.hamcrest.CoreMatchers.equalTo; */ public class DiskThresholdDeciderUnitTests extends ESTestCase { public void testDynamicSettings() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -59,18 +60,15 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { assertTrue(decider.isEnabled()); assertTrue(decider.isIncludeRelocations()); - DiskThresholdDecider.ApplySettings applySettings = decider.newApplySettings(); - Settings newSettings = Settings.builder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, false) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, false) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "500mb") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, "30s") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), false) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "500mb") + 
.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "30s") .build(); - applySettings.onRefreshSettings(newSettings); - + nss.applySettings(newSettings); assertThat("high threshold bytes should be unset", decider.getFreeBytesThresholdHigh(), equalTo(ByteSizeValue.parseBytesSizeValue("0b", "test"))); assertThat("high threshold percentage should be changed", @@ -86,7 +84,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanAllocateUsesMaxAvailableSpace() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -127,7 +125,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanRemainUsesLeastAvailableSpace() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); ImmutableOpenMap.Builder shardRoutingMap = ImmutableOpenMap.builder(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java index 940634a4657..be64aafc61e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java @@ -37,7 +37,7 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { public void testEnableRebalance() throws InterruptedException { final String firstNode = internalCluster().startNode(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); // we test with 2 shards since otherwise it's pretty fragile if there are difference in the num or shards such that // all shards are relocated to the second node which is not what we want here. It's solely a test for the settings to take effect final int numShards = 2; @@ -64,7 +64,7 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { assertThat("index: [test] expected to be rebalanced on both nodes", test.size(), equalTo(2)); // flip the cluster wide setting such that we can also balance for index test_1 eventually we should have one shard of each index on each node - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, randomBoolean() ? 
EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); logger.info("--> balance index [test_1]"); client().admin().cluster().prepareReroute().get(); ensureGreen("test_1"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index 0049a120777..b0a49a93936 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -32,10 +31,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Rebalance; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import java.util.EnumSet; @@ -44,8 +44,8 @@ import java.util.List; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -58,7 +58,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { public void testClusterEnableNone() { AllocationService strategy = createAllocationService(settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build()); logger.info("Building initial routing table"); @@ -86,7 +86,7 @@ 
public class EnableAllocationTests extends ESAllocationTestCase { public void testClusterEnableOnlyPrimaries() { AllocationService strategy = createAllocationService(settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.PRIMARIES.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.PRIMARIES.name()) .build()); logger.info("Building initial routing table"); @@ -159,11 +159,11 @@ public class EnableAllocationTests extends ESAllocationTestCase { final boolean useClusterSetting = randomBoolean(); final Rebalance allowedOnes = RandomPicks.randomFrom(getRandom(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL)); Settings build = settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 3) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) .build(); - NodeSettingsService nodeSettingsService = new NodeSettingsService(build); - AllocationService strategy = createAllocationService(build, nodeSettingsService, getRandom()); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(build, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + AllocationService strategy = createAllocationService(build, clusterSettingsService, getRandom()); Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -213,7 +213,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { if (useClusterSetting) { prevState = clusterState; clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).transientSettings(settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, allowedOnes) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), allowedOnes) .build())).build(); } else { prevState = clusterState; @@ -224,7 +224,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); } - nodeSettingsService.clusterChanged(new ClusterChangedEvent("foo", clusterState, prevState)); + clusterSettingsService.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 6 shards to be started 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(6)); @@ -261,11 +261,11 @@ public class EnableAllocationTests extends ESAllocationTestCase { public void testEnableClusterBalanceNoReplicas() { final boolean useClusterSetting = randomBoolean(); Settings build = settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, useClusterSetting ? 
Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 3) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) .build(); - NodeSettingsService nodeSettingsService = new NodeSettingsService(build); - AllocationService strategy = createAllocationService(build, nodeSettingsService, getRandom()); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(build, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + AllocationService strategy = createAllocationService(build, clusterSettingsService, getRandom()); Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -307,7 +307,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { if (useClusterSetting) { prevState = clusterState; clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).transientSettings(settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL) .build())).build(); } else { prevState = clusterState; @@ -315,7 +315,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices() .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? 
Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); } - nodeSettingsService.clusterChanged(new ClusterChangedEvent("foo", clusterState, prevState)); + clusterSettingsService.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 4 primaries to be started and 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 126799f5937..a17017f6303 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -46,22 +47,12 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MockDiskUsagesIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - // Update more frequently - .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s") - .build(); - } - @Override protected Collection> nodePlugins() { // Use the mock internal cluster info service, which has fake-able disk usages return pluginList(MockInternalClusterInfoService.TestPlugin.class); } - //@TestLogging("org.elasticsearch.cluster:TRACE,org.elasticsearch.cluster.routing.allocation.decider:TRACE") public void testRerouteOccursOnDiskPassingHighWatermark() throws Exception { List nodes = internalCluster().startNodesAsync(3).get(); @@ -77,15 +68,16 @@ public class MockDiskUsagesIT extends ESIntegTestCase { // Start with all nodes at 50% usage final MockInternalClusterInfoService cis = (MockInternalClusterInfoService) internalCluster().getInstance(ClusterInfoService.class, internalCluster().getMasterName()); + cis.setUpdateFrequency(TimeValue.timeValueMillis(200)); + cis.onMaster(); cis.setN1Usage(nodes.get(0), new DiskUsage(nodes.get(0), "n1", "/dev/null", 100, 50)); cis.setN2Usage(nodes.get(1), new DiskUsage(nodes.get(1), "n2", "/dev/null", 100, 50)); cis.setN3Usage(nodes.get(2), new DiskUsage(nodes.get(2), "n3", "/dev/null", 100, 50)); client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, randomFrom("20b", "80%")) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, randomFrom("10b", "90%")) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, "1ms")).get(); - + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), randomFrom("20b", "80%")) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), randomFrom("10b", 
"90%")) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "1ms")).get(); // Create an index with 10 shards so we can check allocation for it prepareCreate("test").setSettings(settingsBuilder() .put("number_of_shards", 10) diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 65d5b0b9fcd..ce76893a831 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.settings; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.cluster.ClusterName; @@ -32,7 +33,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.hamcrest.Matchers; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; @@ -48,22 +48,116 @@ public class ClusterSettingsIT extends ESIntegTestCase { public void testClusterNonExistingSettingsUpdate() { String key1 = "no_idea_what_you_are_talking_about"; int value1 = 10; + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(key1, value1).build()) + .get(); + fail("bogus value"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "transient setting [no_idea_what_you_are_talking_about], not dynamically updateable"); + } + } + + public void testResetClusterSetting() { + DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); + + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); ClusterUpdateSettingsResponse response = client().admin().cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(key1, value1).build()) + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .addTransientResetKeys(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()) + .get(); + + assertAcked(response); + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), 
equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build()) + .get(); + + assertAcked(response); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertFalse(discoverySettings.getPublishDiff()); + response = client().admin().cluster() + .prepareUpdateSettings() + .addTransientResetKeys(randomBoolean() ? "discovery.zen.*" : "*") + .get(); + + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + // now persistent + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) + .get(); + + assertAcked(response); + assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .addPersistentResetKeys(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()) + .get(); + + assertAcked(response); + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build()) + .get(); + + assertAcked(response); + assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertFalse(discoverySettings.getPublishDiff()); + response = client().admin().cluster() + .prepareUpdateSettings() + .addPersistentResetKeys(randomBoolean() ? 
"discovery.zen.*" : "*") + .get(); + + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); } public void testClusterSettingsUpdateResponse() { - String key1 = IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC; + String key1 = IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(); int value1 = 10; - String key2 = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; - boolean value2 = false; + String key2 = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(); + String value2 = EnableAllocationDecider.Allocation.NONE.name(); Settings transientSettings1 = Settings.builder().put(key1, value1, ByteSizeUnit.BYTES).build(); Settings persistentSettings1 = Settings.builder().put(key2, value2).build(); @@ -118,39 +212,45 @@ public class ClusterSettingsIT extends ESIntegTestCase { DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); - assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.DEFAULT_PUBLISH_TIMEOUT)); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); ClusterUpdateSettingsResponse response = client().admin().cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "1s").build()) + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT), equalTo("1s")); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); - response = client().admin().cluster() - .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "whatever").build()) - .get(); + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "whatever").build()) + .get(); + fail("bogus value"); + } catch (ElasticsearchParseException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.commit_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); + } - assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); - response = client().admin().cluster() - .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, -1).build()) - .get(); + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), -1).build()) + .get(); + fail("bogus value"); + } catch (ElasticsearchParseException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [-1] for setting 
[discovery.zen.publish_timeout] must be >= 0s"); + } - assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); } public void testClusterUpdateSettingsWithBlocks() { String key1 = "cluster.routing.allocation.enable"; - Settings transientSettings = Settings.builder().put(key1, false).build(); + Settings transientSettings = Settings.builder().put(key1, EnableAllocationDecider.Allocation.NONE.name()).build(); String key2 = "cluster.routing.allocation.node_concurrent_recoveries"; Settings persistentSettings = Settings.builder().put(key2, "5").build(); @@ -165,7 +265,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertBlocked(request, MetaData.CLUSTER_READ_ONLY_BLOCK); // But it's possible to update the settings to update the "cluster.blocks.read_only" setting - Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY, false).build(); + Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); } finally { diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index c5a695d16e5..6094d49234c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -225,7 +225,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testAttributePreferenceRouting() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id,zone") .build()); @@ -280,7 +280,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testNodeSelectorRouting(){ AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index fa4ce357a52..c484df7d99d 100644 --- a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -19,12 +19,13 @@ package org.elasticsearch.common.breaker; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import 
org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.concurrent.atomic.AtomicBoolean; @@ -87,7 +88,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicReference lastException = new AtomicReference<>(null); final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { @Override public CircuitBreaker getBreaker(String name) { @@ -147,7 +148,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicInteger parentTripped = new AtomicInteger(0); final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { @Override public CircuitBreaker getBreaker(String name) { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java new file mode 100644 index 00000000000..08ec33d0b43 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -0,0 +1,282 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +public class SettingTests extends ESTestCase { + + + public void testGet() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + assertFalse(booleanSetting.get(Settings.EMPTY)); + assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); + assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); + } + + public void testByteSize() { + Setting byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, Setting.Scope.Cluster); + assertFalse(byteSizeValueSetting.isGroupSetting()); + ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); + assertEquals(byteSizeValue.bytes(), 1024); + AtomicReference value = new AtomicReference<>(null); + ClusterSettingsService.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger, Settings.EMPTY); + try { + settingUpdater.prepareApply(Settings.builder().put("a.byte.size", 12).build()); + fail("no unit"); + } catch (ElasticsearchParseException ex) { + assertEquals("failed to parse setting [a.byte.size] with value [12] as a size in bytes: unit is missing or unrecognized", ex.getMessage()); + } + + assertTrue(settingUpdater.prepareApply(Settings.builder().put("a.byte.size", "12b").build())); + settingUpdater.apply(); + assertEquals(new ByteSizeValue(12), value.get()); + } + + public void testSimpleUpdate() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + AtomicReference atomicBoolean = new AtomicReference<>(null); + ClusterSettingsService.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); + Settings build = Settings.builder().put("foo.bar", false).build(); + settingUpdater.prepareApply(build); + assertNull(atomicBoolean.get()); + settingUpdater.rollback(); + assertNull(atomicBoolean.get()); + build = Settings.builder().put("foo.bar", true).build(); + settingUpdater.prepareApply(build); + assertNull(atomicBoolean.get()); + settingUpdater.apply(); + assertTrue(atomicBoolean.get()); + + // try update bogus value + build = Settings.builder().put("foo.bar", "I am not a boolean").build(); + try { + settingUpdater.prepareApply(build); + fail("not a boolean"); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [I am not a boolean] for setting [foo.bar]", ex.getMessage()); + } + } + + public void testUpdateNotDynamic() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, Setting.Scope.Cluster); + assertFalse(booleanSetting.isGroupSetting()); + AtomicReference atomicBoolean = new AtomicReference<>(null); + try { + booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); + fail("not dynamic"); + } catch (IllegalStateException ex) { + assertEquals("setting [foo.bar] is not dynamic", ex.getMessage()); + } + } + + public void testUpdaterIsIsolated() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + AtomicReference ab1 = new AtomicReference<>(null); + AtomicReference ab2 = new AtomicReference<>(null); + ClusterSettingsService.SettingUpdater settingUpdater = 
booleanSetting.newUpdater(ab1::set, logger, Settings.EMPTY); + settingUpdater.prepareApply(Settings.builder().put("foo.bar", true).build()); + assertNull(ab1.get()); + assertNull(ab2.get()); + settingUpdater.apply(); + assertTrue(ab1.get()); + assertNull(ab2.get()); + } + + public void testDefault() { + TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); + Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.Cluster); + assertFalse(setting.isGroupSetting()); + String aDefault = setting.getDefault(Settings.EMPTY); + assertEquals(defautlValue.millis() + "ms", aDefault); + assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); + + Setting secondaryDefault = new Setting<>("foo.bar", "_na_", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.Cluster); + assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); + assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); + } + + public void testComplexType() { + AtomicReference ref = new AtomicReference<>(null); + Setting setting = new Setting<>("foo.bar", "", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.Cluster); + assertFalse(setting.isGroupSetting()); + ref.set(setting.get(Settings.EMPTY)); + ComplexType type = ref.get(); + ClusterSettingsService.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); + assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); + settingUpdater.apply(); + assertSame("no update - type has not changed", type, ref.get()); + + // change from default + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar", "2").build())); + settingUpdater.apply(); + assertNotSame("update - type has changed", type, ref.get()); + assertEquals("2", ref.get().foo); + + + // change back to default... 
+ assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.baz", "2").build())); + settingUpdater.apply(); + assertNotSame("update - type has changed", type, ref.get()); + assertEquals("", ref.get().foo); + } + + public void testRollback() { + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Cluster); + assertFalse(integerSetting.isGroupSetting()); + AtomicReference ref = new AtomicReference<>(null); + ClusterSettingsService.SettingUpdater settingUpdater = integerSetting.newUpdater(ref::set, logger, Settings.EMPTY); + assertNull(ref.get()); + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.int.bar", "2").build())); + settingUpdater.rollback(); + settingUpdater.apply(); + assertNull(ref.get()); + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.int.bar", "2").build())); + settingUpdater.apply(); + assertEquals(2, ref.get().intValue()); + } + + public void testType() { + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Cluster); + assertEquals(integerSetting.getScope(), Setting.Scope.Cluster); + integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Index); + assertEquals(integerSetting.getScope(), Setting.Scope.Index); + } + + public void testGroups() { + AtomicReference ref = new AtomicReference<>(null); + Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.Cluster); + assertTrue(setting.isGroupSetting()); + ClusterSettingsService.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); + + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build())); + settingUpdater.apply(); + assertNotNull(ref.get()); + Settings settings = ref.get(); + Map asMap = settings.getAsGroups(); + assertEquals(3, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "2"); + assertEquals(asMap.get("3").get("value"), "3"); + + Settings current = ref.get(); + assertFalse(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build())); + settingUpdater.apply(); + assertSame(current, ref.get()); + + // now update and check that we got it + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build())); + settingUpdater.apply(); + assertNotSame(current, ref.get()); + + asMap = ref.get().getAsGroups(); + assertEquals(2, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "2"); + + // now update and check that we got it + assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "4").build())); + settingUpdater.apply(); + assertNotSame(current, ref.get()); + + asMap = ref.get().getAsGroups(); + assertEquals(2, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "4"); + + assertTrue(setting.match("foo.bar.baz")); + assertFalse(setting.match("foo.baz.bar")); + + ClusterSettingsService.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY, (s) -> false); + try { + predicateSettingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build()); + fail("not accepted"); + } catch (IllegalArgumentException ex) { + 
assertEquals(ex.getMessage(), "illegal value can't update [foo.bar.] from [{}] to [{1.value=1, 2.value=2}]"); + } + } + + public static class ComplexType { + + final String foo; + + public ComplexType(String foo) { + this.foo = foo; + } + } + + public static class Composite { + + private Integer b; + private Integer a; + + public void set(Integer a, Integer b) { + this.a = a; + this.b = b; + } + } + + + public void testComposite() { + Composite c = new Composite(); + Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.Cluster); + Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.Cluster); + ClusterSettingsService.SettingUpdater settingUpdater = Setting.compoundUpdater(c::set, a, b, logger, Settings.EMPTY); + assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); + settingUpdater.apply(); + assertNull(c.a); + assertNull(c.b); + + Settings build = Settings.builder().put("foo.int.bar.a", 2).build(); + assertTrue(settingUpdater.prepareApply(build)); + settingUpdater.apply(); + assertEquals(2, c.a.intValue()); + assertNull(c.b); + + Integer aValue = c.a; + assertFalse(settingUpdater.prepareApply(build)); + settingUpdater.apply(); + assertSame(aValue, c.a); + + build = Settings.builder().put("foo.int.bar.a", 2).put("foo.int.bar.b", 5).build(); + assertTrue(settingUpdater.prepareApply(build)); + settingUpdater.apply(); + assertEquals(2, c.a.intValue()); + assertEquals(5, c.b.intValue()); + + // reset to default + assertTrue(settingUpdater.prepareApply(Settings.EMPTY)); + settingUpdater.apply(); + assertEquals(1, c.a.intValue()); + assertEquals(1, c.b.intValue()); + + } + + + + + + +} diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java new file mode 100644 index 00000000000..254c310daba --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class SettingsServiceTests extends ESTestCase { + + public void testAddConsumer() { + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); + SettingsService service = new SettingsService(Settings.EMPTY) { + @Override + protected Setting getSetting(String key) { + if (key.equals(testSetting.getKey())) { + return testSetting; + } + return null; + } + }; + + AtomicInteger consumer = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, consumer::set); + AtomicInteger consumer2 = new AtomicInteger(); + try { + service.addSettingsUpdateConsumer(testSetting2, consumer2::set); + fail("setting not registered"); + } catch (IllegalArgumentException ex) { + assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); + } + + + try { + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {consumer.set(a); consumer2.set(b);}); + fail("setting not registered"); + } catch (IllegalArgumentException ex) { + assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); + } + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(2, consumer.get()); + assertEquals(0, consumer2.get()); + } + + public void testApply() { + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); + SettingsService service = new SettingsService(Settings.EMPTY) { + @Override + protected Setting getSetting(String key) { + if (key.equals(testSetting.getKey())) { + return testSetting; + } else if (key.equals(testSetting2.getKey())) { + return testSetting2; + } + return null; + } + }; + + AtomicInteger consumer = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, consumer::set); + AtomicInteger consumer2 = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting2, consumer2::set, (s) -> s > 0); + + AtomicInteger aC = new AtomicInteger(); + AtomicInteger bC = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {aC.set(a); bC.set(b);}); + + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + try { + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + fail("invalid value"); + } catch (IllegalArgumentException ex) { + assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); + } + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + try { + service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + fail("invalid value"); + } catch (IllegalArgumentException ex) { + assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); + } + + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(0, consumer.get()); + assertEquals(0, 
consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(2, consumer.get()); + assertEquals(15, consumer2.get()); + assertEquals(2, aC.get()); + assertEquals(15, bC.get()); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index ec0e26091df..4220973ae21 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -162,4 +162,14 @@ public class TimeValueTests extends ESTestCase { assertThat(e.getMessage(), containsString("Failed to parse")); } } + + public void testToStringRep() { + assertThat("-1", equalTo(new TimeValue(-1).getStringRep())); + assertThat("10ms", equalTo(new TimeValue(10, TimeUnit.MILLISECONDS).getStringRep())); + assertThat("1533ms", equalTo(new TimeValue(1533, TimeUnit.MILLISECONDS).getStringRep())); + assertThat("90s", equalTo(new TimeValue(90, TimeUnit.SECONDS).getStringRep())); + assertThat("90m", equalTo(new TimeValue(90, TimeUnit.MINUTES).getStringRep())); + assertThat("36h", equalTo(new TimeValue(36, TimeUnit.HOURS).getStringRep())); + assertThat("1000d", equalTo(new TimeValue(1000, TimeUnit.DAYS).getStringRep())); + } } diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index 7d36c09ee19..f3783fa6d12 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -21,13 +21,14 @@ package org.elasticsearch.common.util; import org.apache.lucene.util.BytesRef; import org.elasticsearch.cache.recycler.PageCacheRecycler; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; @@ -336,9 +337,9 @@ public class BigArraysTests extends ESSingleNodeTestCase { for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) { HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, size - 1, ByteSizeUnit.BYTES) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), size - 1, ByteSizeUnit.BYTES) .build(), - new NodeSettingsService(Settings.EMPTY)); + new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); try { @@ -356,9 +357,9 @@ public class BigArraysTests extends ESSingleNodeTestCase { final long maxSize = randomIntBetween(1 << 10, 1 << 22); HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( 
Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, maxSize, ByteSizeUnit.BYTES) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) .build(), - new NodeSettingsService(Settings.EMPTY)); + new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); final int size = scaledRandomIntBetween(1, 20); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index b14792a2c33..570c0b424ff 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -132,7 +132,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("http.enabled", false) // just to make test quicker .put("gateway.local.list_timeout", "10s") // still long to induce failures but to long so test won't time out .build(); @@ -150,7 +150,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // TODO: Rarely use default settings form some of these Settings nodeSettings = Settings.builder() .put(DEFAULT_SETTINGS) - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, minimumMasterNode) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode) .build(); if (discoveryConfig == null) { @@ -217,7 +217,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("--> reducing min master nodes to 2"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2)).get()); + .setTransientSettings(Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)).get()); String master = internalCluster().getMasterName(); String nonMaster = null; @@ -293,9 +293,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Wait until the master node sees al 3 nodes again. 
ensureStableCluster(3, new TimeValue(DISRUPTION_HEALING_OVERHEAD.millis() + networkPartition.expectedTimeToHeal().millis())); - logger.info("Verify no master block with {} set to {}", DiscoverySettings.NO_MASTER_BLOCK, "all"); + logger.info("Verify no master block with {} set to {}", DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all"); client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.NO_MASTER_BLOCK, "all")) + .setTransientSettings(Settings.builder().put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all")) .get(); networkPartition.startDisrupting(); @@ -863,7 +863,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { internalCluster().startNodesAsync(3, Settings.builder() .put(DiscoveryService.SETTING_INITIAL_STATE_TIMEOUT, "1ms") - .put(DiscoverySettings.PUBLISH_TIMEOUT, "3s") + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "3s") .build()).get(); logger.info("applying disruption while cluster is forming ..."); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index ea590756a8b..2dac067fed3 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; @@ -38,7 +39,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.BaseFuture; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -67,7 +68,7 @@ public class NodeJoinControllerTests extends ESTestCase { // make sure we have a master clusterService.setState(ClusterState.builder(clusterService.state()).nodes(DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.id()))); nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)), Settings.EMPTY); + new DiscoverySettings(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))), Settings.EMPTY); } public void testSimpleJoinAccumulation() throws InterruptedException, ExecutionException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 0b5f9997dba..3b6708630ee 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ 
b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -84,7 +84,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase { assertThat(zenDiscovery.isRejoinOnMasterGone(), is(true)); client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, false)) + .setTransientSettings(Settings.builder().put(ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING.getKey(), false)) .get(); assertThat(zenDiscovery.isRejoinOnMasterGone(), is(false)); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index d33aadd84aa..3cb5316cacc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; @@ -42,7 +43,7 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -156,7 +157,7 @@ public class PublishClusterStateActionTests extends ESTestCase { public MockNode createMockNode(String name, Settings settings, Version version, @Nullable ClusterStateListener listener) throws Exception { settings = Settings.builder() .put("name", name) - .put(TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .put(settings) .build(); @@ -237,7 +238,7 @@ public class PublishClusterStateActionTests extends ESTestCase { protected MockPublishAction buildPublishClusterStateAction(Settings settings, MockTransportService transportService, DiscoveryNodesProvider nodesProvider, PublishClusterStateAction.NewPendingClusterStateListener listener) { - DiscoverySettings discoverySettings = new DiscoverySettings(settings, new NodeSettingsService(settings)); + DiscoverySettings discoverySettings = new DiscoverySettings(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); return new MockPublishAction(settings, transportService, nodesProvider, listener, discoverySettings, ClusterName.DEFAULT); } @@ -345,7 +346,7 @@ public class PublishClusterStateActionTests extends ESTestCase { } public void testDisablingDiffPublishing() throws Exception { - Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE, false).build(); + Settings noDiffPublishingSettings = 
Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build(); MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings, Version.CURRENT, new ClusterStateListener() { @Override @@ -384,7 +385,7 @@ public class PublishClusterStateActionTests extends ESTestCase { public void testSimultaneousClusterStatePublishing() throws Exception { int numberOfNodes = randomIntBetween(2, 10); int numberOfIterations = scaledRandomIntBetween(5, 50); - Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE, randomBoolean()).build(); + Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), randomBoolean()).build(); MockNode master = createMockNode("node0", settings, Version.CURRENT, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { @@ -510,8 +511,8 @@ public class PublishClusterStateActionTests extends ESTestCase { final boolean expectingToCommit = randomBoolean(); Settings.Builder settings = Settings.builder(); // make sure we have a reasonable timeout if we expect to timeout, o.w. one that will make the test "hang" - settings.put(DiscoverySettings.COMMIT_TIMEOUT, expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h") - .put(DiscoverySettings.PUBLISH_TIMEOUT, "5ms"); // test is about committing + settings.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h") + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "5ms"); // test is about committing MockNode master = createMockNode("master", settings.build()); @@ -695,7 +696,7 @@ public class PublishClusterStateActionTests extends ESTestCase { */ public void testTimeoutOrCommit() throws Exception { Settings settings = Settings.builder() - .put(DiscoverySettings.COMMIT_TIMEOUT, "1ms").build(); // short but so we will sometime commit sometime timeout + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "1ms").build(); // short but so we will sometime commit sometime timeout MockNode master = createMockNode("master", settings); MockNode node = createMockNode("node", settings); diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 15ddc9dd771..2c6a55da242 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -57,7 +57,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { //ridiculous settings to make sure we don't run into uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 100) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) .build()); @@ -111,7 +111,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { //ridiculous settings to make sure we don't run into uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 100) - 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) .build()); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java index 2184fda47c4..dbdf747de63 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java @@ -82,9 +82,9 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase { SearchResponse countResponse = client().prepareSearch().setSize(0).get(); assertHitCount(countResponse, numDocs); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none")).execute().actionGet(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")).execute().actionGet(); backwardsCluster().upgradeAllNodes(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all")).execute().actionGet(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all")).execute().actionGet(); ensureGreen(); countResponse = client().prepareSearch().setSize(0).get(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 01c76b465a9..31e992afb62 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -360,7 +360,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings() .setTransientSettings(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) .get(); logger.info("--> full cluster restart"); internalCluster().fullRestart(); @@ -377,7 +377,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings() .setTransientSettings(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) .get(); logger.info("--> full cluster restart"); internalCluster().fullRestart(); diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java index 21ecdf710b7..457cf31ec83 100644 --- 
a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java @@ -56,7 +56,7 @@ public class TransportIndexFailuresIT extends ESIntegTestCase { .put("discovery.type", "zen") // <-- To override the local setting if set externally .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("discovery.zen.minimum_master_nodes", 1) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 4858e0a6e3c..15249a49727 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -111,9 +111,9 @@ public class CorruptedFileIT extends ESIntegTestCase { // and we need to make sure primaries are not just trashed if we don't have replicas .put(super.nodeSettings(nodeOrdinal)) // speed up recoveries - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10) - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) + .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), 10) + .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), 10) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 5) .build(); } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index 8de3af25827..4f6aaf25705 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -170,14 +170,14 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase { //add a node: 3 out of the 6 shards will be relocated to it //disable allocation before starting a new node, as we need to register the listener first assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); String node2 = internalCluster().startNode(); IndexShardStateChangeListener stateChangeListenerNode2 = new IndexShardStateChangeListener(); //add a listener that keeps track of the shard state changes internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node2).setNewDelegate(stateChangeListenerNode2); //re-enable allocation assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); ensureGreen(); //the 3 relocated shards get closed on the first node diff --git 
a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index 3398839b905..08df60126a5 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -43,10 +43,10 @@ public class CircuitBreakerNoopIT extends ESIntegTestCase { return Settings.builder() .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop") // This is set low, because if the "noop" is not a noop, it will break - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop") // This is set low, because if the "noop" is not a noop, it will break - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") .build(); } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index fcd94d99585..1af04e295dd 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -63,13 +63,13 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { private void reset() { logger.info("--> resetting breaker settings"); Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.DEFAULT_FIELDDATA_BREAKER_LIMIT) - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, - HierarchyCircuitBreakerService.DEFAULT_FIELDDATA_OVERHEAD_CONSTANT) - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.DEFAULT_REQUEST_BREAKER_LIMIT) - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); } @@ -119,8 +119,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Update circuit breaker settings Settings settings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "100b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.05) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "100b") + 
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.05) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -168,8 +168,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Update circuit breaker settings Settings settings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "100b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.05) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "100b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.05) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -213,8 +213,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { .getNodes()[0].getBreaker().getStats(CircuitBreaker.REQUEST).getLimit(); Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); @@ -234,9 +234,9 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Adjust settings so the parent breaker will fail, but the fielddata breaker doesn't resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, "15b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "90%") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "15b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "90%") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings).execute().actionGet(); @@ -261,7 +261,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Make request breaker limited to a small amount Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java index 741ea305254..38da1ac135c 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java @@ -19,12 +19,13 @@ package org.elasticsearch.indices.memory.breaker; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.breaker.BreakerSettings; import 
org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -66,7 +67,7 @@ public class CircuitBreakerUnitTests extends ESTestCase { } public void testRegisterCustomBreaker() throws Exception { - CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)); + CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); String customName = "custom"; BreakerSettings settings = new BreakerSettings(customName, 20, 1.0); service.registerBreaker(settings); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 5ece413f796..50346a2903c 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -142,8 +142,8 @@ public class IndexRecoveryIT extends ESIntegTestCase { assertTrue(client().admin().cluster().prepareUpdateSettings() .setTransientSettings(Settings.builder() // one chunk per sec.. - .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, chunkSize, ByteSizeUnit.BYTES) - .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, chunkSize, ByteSizeUnit.BYTES) + .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), chunkSize, ByteSizeUnit.BYTES) + .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.getKey(), chunkSize, ByteSizeUnit.BYTES) ) .get().isAcknowledged()); } @@ -151,8 +151,8 @@ public class IndexRecoveryIT extends ESIntegTestCase { private void restoreRecoverySpeed() { assertTrue(client().admin().cluster().prepareUpdateSettings() .setTransientSettings(Settings.builder() - .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, "20mb") - .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, "512kb") + .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), "20mb") + .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.getKey(), "512kb") ) .get().isAcknowledged()); } @@ -525,8 +525,8 @@ public class IndexRecoveryIT extends ESIntegTestCase { public void testDisconnectsWhileRecovering() throws Exception { final String indexName = "test"; final Settings nodeSettings = Settings.builder() - .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, "100ms") - .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, "1s") + .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") + .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again .build(); // start a master node diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 8346003287c..ca70fc1fe2f 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.settings.Settings; @@ -44,7 +45,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; @@ -60,7 +61,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class RecoverySourceHandlerTests extends ESTestCase { private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1); - private final NodeSettingsService service = new NodeSettingsService(Settings.EMPTY); + private final ClusterSettingsService service = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); public void testSendFiles() throws Throwable { Settings settings = Settings.builder().put("indices.recovery.concurrent_streams", 1). diff --git a/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java b/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java index bde40aa928f..8ec629dbbdc 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java @@ -40,7 +40,7 @@ public class CloseIndexDisableCloseAllIT extends ESIntegTestCase { // The cluster scope is test b/c we can't clear cluster settings. 
public void testCloseAllRequiresName() { Settings clusterSettings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(clusterSettings)); createIndex("test1", "test2", "test3"); @@ -91,7 +91,7 @@ public class CloseIndexDisableCloseAllIT extends ESIntegTestCase { createIndex("test_no_close"); healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); assertThat(healthResponse.isTimedOut(), equalTo(false)); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(TransportCloseIndexAction.SETTING_CLUSTER_INDICES_CLOSE_ENABLE, false)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey(), false)).get(); try { client.admin().indices().prepareClose("test_no_close").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 96611aeca8a..2e73a466677 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -171,7 +171,7 @@ public class RareClusterStateIT extends ESIntegTestCase { ensureGreen("test"); // now that the cluster is stable, remove publishing timeout - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0"))); + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0"))); Set nodes = new HashSet<>(Arrays.asList(internalCluster().getNodeNames())); nodes.remove(internalCluster().getMasterName()); @@ -200,7 +200,7 @@ public class RareClusterStateIT extends ESIntegTestCase { // but the change might not be on the node that performed the indexing // operation yet - Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0ms").build(); + Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0ms").build(); final List nodeNames = internalCluster().startNodesAsync(2, settings).get(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index fc4dd4f6487..948b76b963d 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -303,7 +303,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable allocation to control the situation more easily assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.debug("--> shutting down two random nodes"); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node1, node2, node3)); @@ 
-322,7 +322,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", "NONE"))); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); logger.debug("--> waiting for shards to recover on [{}]", node4); // we have to do this in two steps as we now do async shard fetching before assigning, so the change to the @@ -340,7 +340,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable allocation again to control concurrency a bit and allow shard active to kick in before allocation assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.debug("--> starting the two old nodes back"); @@ -351,7 +351,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); logger.debug("--> waiting for the lost shard to be recovered"); @@ -396,7 +396,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable relocations when we do this, to make sure the shards are not relocated from node2 // due to rebalancing, and delete its content - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java index 49d22b87bf8..514b1757e41 100644 --- a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java @@ -34,7 +34,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { // The cluster scope is test b/c we can't clear cluster settings. 
public void testDestructiveOperations() throws Exception { Settings settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -58,7 +58,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { } settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, false) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -68,7 +68,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { // end delete index: // close index: settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -100,7 +100,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { } settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, false) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); assertAcked(client().admin().indices().prepareClose("_all").get()); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java index b0288042ecc..8d659c13e13 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java @@ -32,74 +32,74 @@ public class RecoverySettingsTests extends ESSingleNodeTestCase { } public void testAllSettingsAreDynamicallyUpdatable() { - innerTestSettings(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, randomIntBetween(1, 200), ByteSizeUnit.BYTES, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.getKey(), randomIntBetween(1, 200), ByteSizeUnit.BYTES, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.fileChunkSize().bytesAsInt()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS, randomIntBetween(1, 200), new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS_SETTING.getKey(), randomIntBetween(1, 200), new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.translogOps()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, randomIntBetween(1, 200), ByteSizeUnit.BYTES, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING.getKey(), randomIntBetween(1, 200), ByteSizeUnit.BYTES, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.translogSize().bytesAsInt()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, randomIntBetween(1, 200), new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), randomIntBetween(1, 200), new Validator() { @Override public void 
validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.concurrentStreamPool().getMaximumPoolSize()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, randomIntBetween(1, 200), new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), randomIntBetween(1, 200), new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.concurrentSmallFileStreamPool().getMaximumPoolSize()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, 0, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), 0, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(null, recoverySettings.rateLimiter()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.retryDelayStateSync().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.retryDelayNetwork().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.activityTimeout().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.internalActionTimeout().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.internalActionLongTimeout().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_COMPRESS, false, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING.getKey(), false, new 
Validator() { @Override public void validate(RecoverySettings recoverySettings, boolean expectedValue) { assertEquals(expectedValue, recoverySettings.compress()); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 57b5e888ea9..541911ce4e0 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -387,7 +387,7 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> stopping replica assignment"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.info("--> wait for all replica shards to be removed, on all nodes"); assertBusy(new Runnable() { diff --git a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index d94f72ea80f..4f85eb77340 100644 --- a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -62,7 +62,7 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(randomIntBetween(50, 300), ByteSizeUnit.BYTES)); + .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(randomIntBetween(50, 300), ByteSizeUnit.BYTES)); return builder.build(); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 51ae038ca0d..8fde9bbf330 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -56,7 +56,7 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // Rebalancing is causing some checks after restore to randomly fail // due to https://github.com/elastic/elasticsearch/issues/9421 - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) .build(); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index f9392836d8b..2c38c4c74c1 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -123,14 +123,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> set test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.settingsBuilder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) - 
.put(IndicesTTLService.INDICES_TTL_INTERVAL, random, TimeUnit.MINUTES)) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) + .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES)) .execute().actionGet(); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); + .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), equalTo(2)); + .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), equalTo(2)); logger.info("--> create repository"); PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") @@ -146,23 +146,25 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> clean the test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.settingsBuilder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 1) - .put(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1))) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1) + .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1))) .execute().actionGet(); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis())); + .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis())); stopNode(secondNode); assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("1").get().isTimedOut(), equalTo(false)); logger.info("--> restore snapshot"); - client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet(); - assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); - + try { + client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet(); + fail("can't restore minimum master nodes"); + } catch (IllegalArgumentException ex) { + assertEquals("cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [1]", ex.getMessage()); + } logger.info("--> ensure that zen discovery minimum master nodes wasn't restored"); 
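+ // the rejected restore must not leak into the cluster state: the persisted
+ // minimum_master_nodes value checked below still has to differ from the
+ // snapshotted value of 2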
assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), not(equalTo(2))); + .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), not(equalTo(2))); } public void testRestoreCustomMetadata() throws Exception { @@ -554,7 +556,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest public void testRestoreIndexWithShardsMissingInLocalGateway() throws Exception { logger.info("--> start 2 nodes"); Settings nodeSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) .build(); internalCluster().startNode(nodeSettings); diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java deleted file mode 100644 index 3dfca5cb283..00000000000 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.threadpool; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.settings.Validator; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.util.*; - -import static org.junit.Assert.*; - -public class ThreadPoolTypeSettingsValidatorTests extends ESTestCase { - private Validator validator; - - @Before - public void setUp() throws Exception { - super.setUp(); - validator = ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR; - } - - public void testValidThreadPoolTypeSettings() { - for (Map.Entry<String, ThreadPool.ThreadPoolType> entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { - assertNull(validateSetting(validator, entry.getKey(), entry.getValue().getType())); - } - } - - public void testInvalidThreadPoolTypeSettings() { - for (Map.Entry<String, ThreadPool.ThreadPoolType> entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { - Set<ThreadPool.ThreadPoolType> set = new HashSet<>(); - set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values())); - set.remove(entry.getValue()); - ThreadPool.ThreadPoolType invalidThreadPoolType = randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); - String expectedMessage = String.format( - Locale.ROOT, - "thread pool type for [%s] can only be updated to [%s] but was [%s]", - entry.getKey(), - entry.getValue().getType(), - invalidThreadPoolType.getType()); - String message = validateSetting(validator, entry.getKey(), invalidThreadPoolType.getType()); - assertNotNull(message); - assertEquals(expectedMessage, message); - } - } - - public void testNonThreadPoolTypeSetting() { - String setting = ThreadPool.THREADPOOL_GROUP + randomAsciiOfLength(10) + "foo"; - String value = randomAsciiOfLength(10); - assertNull(validator.validate(setting, value, ClusterState.PROTO)); - } - - private String validateSetting(Validator validator, String threadPoolName, String value) { - return validator.validate(ThreadPool.THREADPOOL_GROUP + threadPoolName + ".type", value, ClusterState.PROTO); - } -} diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 95ceea1e490..47d82a36088 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -19,8 +19,10 @@ package org.elasticsearch.threadpool; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool.Names; @@ -90,9 +92,10 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); - - threadPool.updateSettings( + clusterSettingsService.applySettings( settingsBuilder() .put("threadpool."
+ threadPoolName + ".type", invalidThreadPoolType.getType()) .build() @@ -111,14 +114,16 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.CACHED); ThreadPool threadPool = null; try { - threadPool = new ThreadPool( - Settings.settingsBuilder() - .put("name", "testCachedExecutorType").build()); + Settings nodeSettings = Settings.settingsBuilder() + .put("name", "testCachedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - threadPool.updateSettings(settingsBuilder() + Settings settings = clusterSettingsService.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); @@ -134,7 +139,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change keep alive Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value changed assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -143,7 +148,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Set the same keep alive - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value didn't change assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -160,11 +165,13 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder() - .put("name", "testCachedExecutorType").build()); + Settings nodeSettings = Settings.settingsBuilder() + .put("name", "testFixedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - - threadPool.updateSettings(settingsBuilder() + Settings settings = clusterSettingsService.applySettings(settingsBuilder() .put("threadpool." 
+ threadPoolName + ".size", "15") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); @@ -177,7 +184,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(0L)); // Put old type back - threadPool.updateSettings(Settings.EMPTY); + settings = clusterSettingsService.applySettings(Settings.EMPTY); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); // Make sure keep alive value is not used assertThat(info(threadPool, threadPoolName).getKeepAlive(), nullValue()); @@ -190,7 +197,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change size Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".size", "10").build()); + settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".size", "10").build()); // Make sure size values changed assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(10)); @@ -201,8 +208,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Change queue capacity - threadPool.updateSettings(settingsBuilder() - .put("threadpool." + threadPoolName + ".queue", "500") + settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".queue", "500") .build()); } finally { terminateThreadPoolIfNeeded(threadPool); @@ -213,9 +219,12 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.SCALING); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder() + Settings nodeSettings = settingsBuilder() .put("threadpool." + threadPoolName + ".size", 10) - .put("name", "testCachedExecutorType").build()); + .put("name", "testScalingExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1)); assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L)); @@ -224,7 +233,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change settings that doesn't require pool replacement Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder() + clusterSettingsService.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .put("threadpool." + threadPoolName + ".min", "2") .put("threadpool." + threadPoolName + ".size", "15") @@ -248,9 +257,12 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(Settings.settingsBuilder() + Settings nodeSettings = Settings.settingsBuilder() .put("threadpool." 
+ threadPoolName + ".queue_size", 1000) - .put("name", "testCachedExecutorType").build()); + .put("name", "testCachedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L); final CountDownLatch latch = new CountDownLatch(1); @@ -264,7 +276,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { } } ); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".queue_size", 2000).build()); + clusterSettingsService.applySettings(settingsBuilder().put("threadpool." + threadPoolName + ".queue_size", 2000).build()); assertThat(threadPool.executor(threadPoolName), not(sameInstance(oldExecutor))); assertThat(oldExecutor.isShutdown(), equalTo(true)); assertThat(oldExecutor.isTerminating(), equalTo(true)); @@ -279,12 +291,15 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { public void testCustomThreadPool() throws Exception { ThreadPool threadPool = null; try { - threadPool = new ThreadPool(Settings.settingsBuilder() + Settings nodeSettings = Settings.settingsBuilder() .put("threadpool.my_pool1.type", "scaling") .put("threadpool.my_pool2.type", "fixed") .put("threadpool.my_pool2.size", "1") .put("threadpool.my_pool2.queue_size", "1") - .put("name", "testCustomThreadPool").build()); + .put("name", "testCustomThreadPool").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + threadPool.setNodeSettingsService(clusterSettingsService); ThreadPoolInfo groups = threadPool.info(); boolean foundPool1 = false; boolean foundPool2 = false; @@ -316,7 +331,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Settings settings = Settings.builder() .put("threadpool.my_pool2.size", "10") .build(); - threadPool.updateSettings(settings); + clusterSettingsService.applySettings(settings); groups = threadPool.info(); foundPool1 = false; diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index becb61666da..f7a9c221f7c 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -21,12 +21,14 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -69,12 +71,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { 
super.setUp(); threadPool = new ThreadPool(getClass().getName()); serviceA = build( - Settings.builder().put("name", "TS_A", TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING").build(), + Settings.builder().put("name", "TS_A", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version0, new NamedWriteableRegistry() ); nodeA = new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), version0); serviceB = build( - Settings.builder().put("name", "TS_B", TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING").build(), + Settings.builder().put("name", "TS_B", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version1, new NamedWriteableRegistry() ); nodeB = new DiscoveryNode("TS_B", "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), version1); @@ -650,9 +652,10 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { includeSettings = "test"; excludeSettings = "DOESN'T_MATCH"; } - - serviceA.applySettings(Settings.builder() - .put(TransportService.SETTING_TRACE_LOG_INCLUDE, includeSettings, TransportService.SETTING_TRACE_LOG_EXCLUDE, excludeSettings) + ClusterSettingsService service = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + serviceA.setDynamicSettings(service); + service.applySettings(Settings.builder() + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), includeSettings, TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), excludeSettings) .build()); tracer.reset(4); diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java index 3f140b388fd..ca3e4a21604 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -28,7 +29,7 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty.NettyTransport; @@ -64,7 +65,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { threadPool = new ThreadPool(settings); - threadPool.setNodeSettingsService(new NodeSettingsService(settings)); + threadPool.setNodeSettingsService(new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); NetworkService networkService = new NetworkService(settings); BigArrays 
bigArrays = new MockBigArrays(new MockPageCacheRecycler(settings, threadPool), new NoneCircuitBreakerService()); nettyTransport = new NettyTransport(settings, threadPool, networkService, bigArrays, Version.CURRENT, new NamedWriteableRegistry()); diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java index 36b20b09fc1..59b57fca7de 100755 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java @@ -27,7 +27,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,9 +40,9 @@ public class AzureDiscovery extends ZenDiscovery { @Inject public AzureDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, pingService, electMasterService, discoverySettings); } } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java index e94b7618d12..e0d15a584bd 100755 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java @@ -27,7 +27,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,10 +40,10 @@ public class Ec2Discovery extends ZenDiscovery { @Inject public Ec2Discovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, pingService, electMasterService, discoverySettings); } } diff --git 
a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java index f20d1c74f83..e004230b735 100755 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java @@ -27,7 +27,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,10 +40,10 @@ public class GceDiscovery extends ZenDiscovery { @Inject public GceDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, pingService, electMasterService, discoverySettings); } } diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 6ac2101fe52..b105d269163 100644 --- a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -31,8 +31,9 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; @@ -77,11 +78,11 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { } @Inject - public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService, + public MockInternalClusterInfoService(Settings settings, ClusterSettingsService clusterSettingsService, TransportNodesStatsAction transportNodesStatsAction, TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { - super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); + super(settings, clusterSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); this.clusterName = ClusterName.clusterNameFromSettings(settings); stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", 
"/dev/null", 100, 100)); @@ -133,4 +134,9 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { return "/dev/null"; } } + + @Override + public void setUpdateFrequency(TimeValue updateFrequency) { + super.setUpdateFrequency(updateFrequency); + } } diff --git a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 9a7a3efa3dc..09a0ab9b6d3 100644 --- a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.dfs.DfsPhase; @@ -67,10 +67,10 @@ public class MockSearchService extends SearchService { } @Inject - public MockSearchService(Settings settings, NodeSettingsService nodeSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, - ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, - DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { - super(settings, nodeSettingsService, clusterService, indicesService, indicesWarmer, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, + public MockSearchService(Settings settings, ClusterSettingsService clusterSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, + ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, + DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { + super(settings, clusterSettingsService, clusterService, indicesService, indicesWarmer, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, queryPhase, fetchPhase, indicesQueryCache); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index c4f4b196739..e4d3a3c42a5 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -43,7 +44,7 @@ import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.ReplicaShardAllocator; import 
org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import java.lang.reflect.Constructor; @@ -67,37 +68,37 @@ public abstract class ESAllocationTestCase extends ESTestCase { } public static MockAllocationService createAllocationService(Settings settings, Random random) { - return createAllocationService(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), random); + return createAllocationService(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), random); } - public static MockAllocationService createAllocationService(Settings settings, NodeSettingsService nodeSettingsService, Random random) { + public static MockAllocationService createAllocationService(Settings settings, ClusterSettingsService clusterSettingsService, Random random) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, nodeSettingsService, random), + randomAllocationDeciders(settings, clusterSettingsService, random), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); } public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), + randomAllocationDeciders(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), clusterInfoService); } public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator allocator) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), + randomAllocationDeciders(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), new ShardsAllocators(settings, allocator), EmptyClusterInfoService.INSTANCE); } - public static AllocationDeciders randomAllocationDeciders(Settings settings, NodeSettingsService nodeSettingsService, Random random) { + public static AllocationDeciders randomAllocationDeciders(Settings settings, ClusterSettingsService clusterSettingsService, Random random) { final List<Class<? extends AllocationDecider>> defaultAllocationDeciders = ClusterModule.DEFAULT_ALLOCATION_DECIDERS; final List<AllocationDecider> list = new ArrayList<>(); for (Class<? extends AllocationDecider> deciderClass : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { try { try { - Constructor<? extends AllocationDecider> constructor = deciderClass.getConstructor(Settings.class, NodeSettingsService.class); - list.add(constructor.newInstance(settings, nodeSettingsService)); + Constructor<? extends AllocationDecider> constructor = deciderClass.getConstructor(Settings.class, ClusterSettingsService.class); + list.add(constructor.newInstance(settings, clusterSettingsService)); } catch (NoSuchMethodException e) { Constructor<? extends AllocationDecider> constructor = null; constructor = deciderClass.getConstructor(Settings.class); diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 47e163a6291..dbd260491f6 100644 ---
a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -136,7 +136,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { if (globalCompatibilityVersion().before(Version.V_1_3_2)) { // if we test against nodes before 1.3.2 we disable all the compression due to a known bug // see #7210 - builder.put(RecoverySettings.INDICES_RECOVERY_COMPRESS, false); + builder.put(RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING.getKey(), false); } return builder; } @@ -255,7 +255,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { // if we test against nodes before 1.3.2 we disable all the compression due to a known bug // see #7210 builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, false) - .put(RecoverySettings.INDICES_RECOVERY_COMPRESS, false); + .put(RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING.getKey(), false); } return builder.build(); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index cc5348bc6bc..24adcf960eb 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1039,7 +1039,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ public void setMinimumMasterNodes(int n) { assertTrue(client().admin().cluster().prepareUpdateSettings().setTransientSettings( - settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, n)) + settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), n)) .get().isAcknowledged()); } @@ -1474,7 +1474,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** Sets or unsets the cluster read_only mode **/ public static void setClusterReadOnly(boolean value) { - Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY, value).build(); + Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), value).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); } @@ -1687,8 +1687,8 @@ public abstract class ESIntegTestCase extends ESTestCase { Settings.Builder builder = settingsBuilder() // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b") .put("script.indexed", "on") .put("script.inline", "on") // wait short time for other active shards before actually deleting, default 30s not needed in tests diff --git a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index b86c8689699..c8eacc4c1ae 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -292,19 +292,19 @@ public final class InternalTestCluster extends TestCluster { } // Default the watermarks to absurdly low to prevent the tests 
// from failing on nodes without enough disk space - builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b"); - builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b"); + builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b"); + builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b"); if (TEST_NIGHTLY) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, RandomInts.randomIntBetween(random, 10, 15)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RandomInts.randomIntBetween(random, 10, 15)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, RandomInts.randomIntBetween(random, 5, 10)); + builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 10, 15)); + builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 10, 15)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); } else if (random.nextInt(100) <= 90) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, RandomInts.randomIntBetween(random, 3, 6)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RandomInts.randomIntBetween(random, 3, 6)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, RandomInts.randomIntBetween(random, 2, 5)); + builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 3, 6)); + builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 3, 6)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); } // always reduce this - it can make tests really slow - builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50))); + builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50))); defaultSettings = builder.build(); executor = EsExecutors.newCached("test runner", 0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName)); } @@ -412,7 +412,7 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, new TimeValue(RandomInts.randomIntBetween(random, 10, 30), TimeUnit.SECONDS)); + builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.getKey(), new TimeValue(RandomInts.randomIntBetween(random, 10, 30), TimeUnit.SECONDS)); } if (random.nextInt(10) == 0) { @@ -430,25 +430,25 @@ public final class InternalTestCluster extends TestCluster { if (random.nextBoolean()) { if (random.nextInt(10) == 0) { // do something crazy slow here - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), new 
ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); } else { - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); } } if (random.nextBoolean()) { - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE, RandomPicks.randomFrom(random, StoreRateLimiting.Type.values())); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING.getKey(), RandomPicks.randomFrom(random, StoreRateLimiting.Type.values())); } if (random.nextBoolean()) { if (random.nextInt(10) == 0) { // do something crazy slow here - builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); + builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); } else { - builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); + builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); } } if (random.nextBoolean()) { - builder.put(RecoverySettings.INDICES_RECOVERY_COMPRESS, random.nextBoolean()); + builder.put(RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { From 2e27ee393f04b95c9c51b1553cf91c0ae57e21af Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Dec 2015 21:19:40 +0100 Subject: [PATCH 003/322] Add REST API to reset settings --- .../ClusterUpdateSettingsRequest.java | 20 +-- .../ClusterUpdateSettingsRequestBuilder.java | 20 --- .../cluster/settings/SettingsUpdater.java | 130 ++++++++++++++++++ .../TransportClusterUpdateSettingsAction.java | 104 ++------------ .../common/settings/Settings.java | 8 +- .../loader/XContentSettingsLoader.java | 18 +-- .../settings/SettingsUpdaterTests.java | 127 +++++++++++++++++ .../cluster/settings/ClusterSettingsIT.java | 35 ++++- .../common/settings/SettingsServiceTests.java | 1 - .../cluster/update-settings.asciidoc | 25 ++++ .../test/cluster.put_settings/11_reset.yaml | 31 +++++ 11 files changed, 371 insertions(+), 148 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java create mode 100644 core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index 96e90e1afc3..1f6bf871101 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -44,8 +44,6 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> - private Set<String> transientReset = new HashSet<>(); - private Set<String> persistentReset = new HashSet<>(); public ClusterUpdateSettingsRequest() { } @@ -53,7 +51,7 @@
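(For orientation: with the explicit reset-keys API removed below, a reset travels as an ordinary settings update. A sketch of such a request, assuming the null-value convention implemented by SettingsUpdater later in this commit; the authoritative syntax lives in the update-settings.asciidoc change listed in the stat section above:

    curl -XPUT localhost:9200/_cluster/settings -d '{
        "transient": {
            "cluster.routing.allocation.enable": null
        }
    }'

A null value marks the key for removal, and keys may be simple wildcard patterns that expand against the currently set keys.)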
public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> - public Set<String> getTransientReset() { return Collections.unmodifiableSet(transientReset); } - - public Set<String> getPersistentReset() { return Collections.unmodifiableSet(persistentReset); } - - public void addTransientResetKeys(Collection<String> keys) { - transientReset.addAll(keys); - } - - public void addPersistentResetKeys(Collection<String> keys) { - persistentReset.addAll(keys); - } - /** * Sets the transient settings to be updated. They will not survive a full cluster restart */ @@ -162,8 +148,6 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> - transientReset = new HashSet<>(Arrays.asList(in.readStringArray())); - persistentReset = new HashSet<>(Arrays.asList(in.readStringArray())); readTimeout(in); } @@ -172,8 +156,6 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpdateSettingsRequest> - public ClusterUpdateSettingsRequestBuilder addTransientResetKeys(Collection<String> keys) { - request.addTransientResetKeys(keys); - return this; - } - - public ClusterUpdateSettingsRequestBuilder addPersistentResetKeys(Collection<String> keys) { - request.addPersistentResetKeys(keys); - return this; - } - - public ClusterUpdateSettingsRequestBuilder addTransientResetKeys(String... keys) { - request.addTransientResetKeys(Arrays.asList(keys)); - return this; - } - - public ClusterUpdateSettingsRequestBuilder addPersistentResetKeys(String... keys) { - request.addPersistentResetKeys(Arrays.asList(keys)); - return this; - } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java new file mode 100644 index 00000000000..a6b61844a1e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.ClusterSettingsService; +import org.elasticsearch.common.settings.Settings; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.cluster.ClusterState.builder; + +/** + * Updates transient and persistent cluster state settings if there are any changes + * due to the update.
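+ * <p>
+ * Note: as implemented below, a null value marks a setting for removal, and keys
+ * to be removed may be simple wildcard patterns matched via Regex.simpleMatch.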
+ */ +final class SettingsUpdater { + final Settings.Builder transientUpdates = Settings.settingsBuilder(); + final Settings.Builder persistentUpdates = Settings.settingsBuilder(); + private final ClusterSettings dynamicSettings; + private final ClusterSettingsService clusterSettingsService; + + SettingsUpdater(ClusterSettingsService clusterSettingsService) { + this.dynamicSettings = clusterSettingsService.getClusterSettings(); + this.clusterSettingsService = clusterSettingsService; + } + + synchronized Settings getTransientUpdates() { + return transientUpdates.build(); + } + + synchronized Settings getPersistentUpdate() { + return persistentUpdates.build(); + } + + synchronized ClusterState updateSettings(final ClusterState currentState, Settings transientToApply, Settings persistentToApply) { + boolean changed = false; + Settings.Builder transientSettings = Settings.settingsBuilder(); + transientSettings.put(currentState.metaData().transientSettings()); + changed |= apply(transientToApply, transientSettings, transientUpdates, "transient"); + + Settings.Builder persistentSettings = Settings.settingsBuilder(); + persistentSettings.put(currentState.metaData().persistentSettings()); + changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent"); + + if (!changed) { + return currentState; + } + + MetaData.Builder metaData = MetaData.builder(currentState.metaData()) + .persistentSettings(persistentSettings.build()) + .transientSettings(transientSettings.build()); + + ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) || MetaData.SETTING_READ_ONLY_SETTING.get(metaData.transientSettings()); + if (updatedReadOnly) { + blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); + } else { + blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); + } + ClusterState build = builder(currentState).metaData(metaData).blocks(blocks).build(); + Settings settings = build.metaData().settings(); + // now we try to apply things and if they are invalid we fail + // this dryRun will validate & parse settings but won't actually apply them. 
+ clusterSettingsService.dryRun(settings); + return build; + } + + private boolean apply(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + boolean changed = false; + final Set toRemove = new HashSet<>(); + Settings.Builder settingsBuilder = Settings.settingsBuilder(); + for (Map.Entry entry : toApply.getAsMap().entrySet()) { + if (entry.getValue() == null) { + toRemove.add(entry.getKey()); + } else if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) { + settingsBuilder.put(entry.getKey(), entry.getValue()); + updates.put(entry.getKey(), entry.getValue()); + changed = true; + } else { + throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + } + + } + changed |= applyDeletes(toRemove, target); + target.put(settingsBuilder.build()); + return changed; + } + + private final boolean applyDeletes(Set deletes, Settings.Builder builder) { + boolean changed = false; + for (String entry : deletes) { + Set keysToRemove = new HashSet<>(); + Set keySet = builder.internalMap().keySet(); + for (String key : keySet) { + if (Regex.simpleMatch(entry, key)) { + keysToRemove.add(key); + } + } + for (String key : keysToRemove) { + builder.remove(key); + changed = true; + } + } + return changed; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 90cc68abe91..9170b9168b1 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -28,26 +28,19 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import static org.elasticsearch.cluster.ClusterState.builder; /** @@ -57,16 +50,13 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct private final AllocationService allocationService; - private final ClusterSettings dynamicSettings; private final ClusterSettingsService clusterSettingsService; @Inject public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - AllocationService allocationService, ClusterSettings dynamicSettings, - 
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettingsService clusterSettingsService) { + AllocationService allocationService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettingsService clusterSettingsService) { super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, ClusterUpdateSettingsRequest::new); this.allocationService = allocationService; - this.dynamicSettings = dynamicSettings; this.clusterSettingsService = clusterSettingsService; } @@ -93,9 +83,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener listener) { - final Settings.Builder transientUpdates = Settings.settingsBuilder(); - final Settings.Builder persistentUpdates = Settings.settingsBuilder(); - + final SettingsUpdater updater = new SettingsUpdater(clusterSettingsService); clusterService.submitStateUpdateTask("cluster_update_settings", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { @@ -103,7 +91,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) { - return new ClusterUpdateSettingsResponse(acknowledged, transientUpdates.build(), persistentUpdates.build()); + return new ClusterUpdateSettingsResponse(acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate()); } @Override @@ -130,7 +118,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct // so we should *not* execute the reroute. 
if (!clusterService.state().nodes().localNodeMaster()) { logger.debug("Skipping reroute after cluster update settings, because node is no longer master"); - listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build())); + listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate())); return; } @@ -150,13 +138,13 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override //we return when the cluster reroute is acked or it times out but the acknowledged flag depends on whether the update settings was acknowledged protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) { - return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, transientUpdates.build(), persistentUpdates.build()); + return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate()); } @Override public void onNoLongerMaster(String source) { logger.debug("failed to preform reroute after cluster settings were updated - current node is no longer a master"); - listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build())); + listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate())); } @Override @@ -186,83 +174,11 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override public ClusterState execute(final ClusterState currentState) { - Settings.Builder transientSettings = Settings.settingsBuilder(); - transientSettings.put(currentState.metaData().transientSettings()); - for (Map.Entry entry : request.transientSettings().getAsMap().entrySet()) { - if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) { - transientSettings.put(entry.getKey(), entry.getValue()); - transientUpdates.put(entry.getKey(), entry.getValue()); - changed = true; - } else { - throw new IllegalArgumentException("transient setting [" + entry.getKey() + "], not dynamically updateable"); - } - } - - Settings.Builder persistentSettings = Settings.settingsBuilder(); - persistentSettings.put(currentState.metaData().persistentSettings()); - for (Map.Entry entry : request.persistentSettings().getAsMap().entrySet()) { - if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) { - persistentSettings.put(entry.getKey(), entry.getValue()); - persistentUpdates.put(entry.getKey(), entry.getValue()); - changed = true; - } else { - throw new IllegalArgumentException("persistent setting [" + entry.getKey() + "], not dynamically updateable"); - } - } - - for (String entry : request.getPersistentReset()) { - Set strings = persistentSettings.internalMap().keySet(); - Set keysToRemove = new HashSet(); - for (String key : strings) { - if (Regex.simpleMatch(entry, key)) { - keysToRemove.add(key); - } - } - for (String keyToRemove : keysToRemove) { - persistentSettings.remove(keyToRemove); - persistentUpdates.remove(keyToRemove); - } - changed |= keysToRemove.isEmpty() == false; - } - - for (String entry : request.getTransientReset()) { - Set strings = transientSettings.internalMap().keySet(); - Set keysToRemove = new HashSet<>(); - for (String key : strings) { - if (Regex.simpleMatch(entry, key)) { - keysToRemove.add(key); 
- } - } - for (String keyToRemove : keysToRemove) { - transientSettings.remove(keyToRemove); - transientUpdates.remove(keyToRemove); - } - changed |= keysToRemove.isEmpty() == false; - } - - - if (!changed) { - return currentState; - } - - MetaData.Builder metaData = MetaData.builder(currentState.metaData()) - .persistentSettings(persistentSettings.build()) - .transientSettings(transientSettings.build()); - - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) || MetaData.SETTING_READ_ONLY_SETTING.get(metaData.transientSettings()); - if (updatedReadOnly) { - blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); - } else { - blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); - } - ClusterState build = builder(currentState).metaData(metaData).blocks(blocks).build(); - Settings settings = build.metaData().settings(); - // now we try to apply things and if they are invalid we fail - // this dryRun will validate & parse settings but won't actually apply them. - clusterSettingsService.dryRun(settings); - return build; + ClusterState clusterState = updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings()); + changed = clusterState != currentState; + return clusterState; } }); } + } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index aae4cb2b54d..05f3cb1ff0b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -715,7 +715,7 @@ public final class Settings implements ToXContent { Builder builder = new Builder(); int numberOfSettings = in.readVInt(); for (int i = 0; i < numberOfSettings; i++) { - builder.put(in.readString(), in.readString()); + builder.put(in.readString(), in.readOptionalString()); } return builder.build(); } @@ -724,7 +724,7 @@ public final class Settings implements ToXContent { out.writeVInt(settings.getAsMap().size()); for (Map.Entry entry : settings.getAsMap().entrySet()) { out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + out.writeOptionalString(entry.getValue()); } } @@ -827,6 +827,10 @@ public final class Settings implements ToXContent { return this; } + public Builder putNull(String key) { + return put(key, (String) null); + } + /** * Sets a setting with the provided setting key and class as value. 
* diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java index 725c7e56949..9c2f973b96e 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java @@ -103,9 +103,9 @@ public abstract class XContentSettingsLoader implements SettingsLoader { } else if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { - // ignore this + serializeValue(settings, sb, path, parser, currentFieldName, true); } else { - serializeValue(settings, sb, path, parser, currentFieldName); + serializeValue(settings, sb, path, parser, currentFieldName, false); } } @@ -126,31 +126,33 @@ public abstract class XContentSettingsLoader implements SettingsLoader { } else if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { + serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), true); // ignore } else { - serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++)); + serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), false); } } } - private void serializeValue(Map settings, StringBuilder sb, List path, XContentParser parser, String fieldName) throws IOException { + private void serializeValue(Map settings, StringBuilder sb, List path, XContentParser parser, String fieldName, boolean isNull) throws IOException { sb.setLength(0); for (String pathEle : path) { sb.append(pathEle).append('.'); } sb.append(fieldName); String key = sb.toString(); - String currentValue = parser.text(); - String previousValue = settings.put(key, currentValue); - if (previousValue != null) { + String currentValue = isNull ? null : parser.text(); + + if (settings.containsKey(key)) { throw new ElasticsearchParseException( "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]", key, parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber, - previousValue, + settings.get(key), currentValue ); } + settings.put(key, currentValue); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java new file mode 100644 index 00000000000..0de13686509 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.ClusterSettingsService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicReference; + +public class SettingsUpdaterTests extends ESTestCase { + + + public void testUpdateSetting() { + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + .transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5).build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build()); + assertNotSame(clusterState, build); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 0.5, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 4.5, 0.1); + + clusterState = updater.updateSettings(clusterState, Settings.builder().putNull("cluster.routing.*").build(), + Settings.EMPTY); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); + assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + + clusterState = updater.updateSettings(clusterState, + Settings.EMPTY, 
Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build()); + + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 10.0, 0.1); + assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().persistentSettings())); + assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + assertNull("updater only does a dryRun", index.get()); + assertNull("updater only does a dryRun", shard.get()); + } + + public void testAllOrNothing() { + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + .transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + + try { + updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + fail("all or nothing"); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [not a float] for setting [cluster.routing.allocation.balance.index]", ex.getMessage()); + } + assertNull("updater only does a dryRun", index.get()); + assertNull("updater only does a dryRun", shard.get()); + } + + public void testClusterBlock() { + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + 
.transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + + ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), true).build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + assertEquals(clusterState.blocks().global().size(), 1); + assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_BLOCK); + + clusterState = updater.updateSettings(build, Settings.EMPTY, + Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build()); + assertEquals(clusterState.blocks().global().size(), 0); + + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index ce76893a831..3a028fe54b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import static org.elasticsearch.common.inject.matcher.Matchers.not; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -59,6 +60,32 @@ public class ClusterSettingsIT extends ESIntegTestCase { } } + public void testDeleteIsAppliedFirst() { + DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); + + assertEquals(discoverySettings.getPublishTimeout(), DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY)); + assertTrue(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)); + + ClusterUpdateSettingsResponse response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false) + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) + .get(); + + assertAcked(response); + assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "1s"); + assertTrue(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)); + assertFalse(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); + + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putNull((randomBoolean() ? 
"discovery.zen.*" : "*")).put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "2s")) + .get(); + assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "2s"); + assertNull(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); + } + public void testResetClusterSetting() { DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); @@ -78,7 +105,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { response = client().admin().cluster() .prepareUpdateSettings() - .addTransientResetKeys(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()) + .setTransientSettings(Settings.builder().putNull(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())) .get(); assertAcked(response); @@ -99,7 +126,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertFalse(discoverySettings.getPublishDiff()); response = client().admin().cluster() .prepareUpdateSettings() - .addTransientResetKeys(randomBoolean() ? "discovery.zen.*" : "*") + .setTransientSettings(Settings.builder().putNull((randomBoolean() ? "discovery.zen.*" : "*"))) .get(); assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); @@ -121,7 +148,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { response = client().admin().cluster() .prepareUpdateSettings() - .addPersistentResetKeys(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()) + .setPersistentSettings(Settings.builder().putNull((DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()))) .get(); assertAcked(response); @@ -143,7 +170,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertFalse(discoverySettings.getPublishDiff()); response = client().admin().cluster() .prepareUpdateSettings() - .addPersistentResetKeys(randomBoolean() ? "discovery.zen.*" : "*") + .setPersistentSettings(Settings.builder().putNull((randomBoolean() ? "discovery.zen.*" : "*"))) .get(); assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java index 254c310daba..ae6fd79aa58 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.settings; import org.elasticsearch.test.ESTestCase; - import java.util.concurrent.atomic.AtomicInteger; public class SettingsServiceTests extends ESTestCase { diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 08f4c900597..2106bdf53b0 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -38,6 +38,31 @@ last example will be: }' -------------------------------------------------- +Resetting persistent or transient settings can be done by assigning a +`null` value. If a transient setting is reset, the persistent setting +is applied if available. Otherwise Elasticsearch will fallback to the setting +defined at the configuration file or, if not existent, to the default +value. 
Here is an example: + +[source,js] +-------------------------------------------------- +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "discovery.zen.minimum_master_nodes" : null + } +}' +-------------------------------------------------- + +Reset settings will not be included in the cluster response. So +the response for the last example will be: + +[source,js] +-------------------------------------------------- +{ + "persistent" : {}, + "transient" : {} +} + Cluster wide settings can be returned using: [source,js] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml new file mode 100644 index 00000000000..069f7921873 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml @@ -0,0 +1,31 @@ +--- +"Test reset cluster settings": + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: false + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: null + flat_settings: true + + - match: {persistent: {}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {}} \ No newline at end of file From 85029263270829d700e7ac73bb78d8521a1c8d0a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Dec 2015 21:35:49 +0100 Subject: [PATCH 004/322] fuck you linefeed --- .../rest-api-spec/test/cluster.put_settings/11_reset.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml index 069f7921873..4162296532d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml @@ -28,4 +28,4 @@ cluster.get_settings: flat_settings: true - - match: {persistent: {}} \ No newline at end of file + - match: {persistent: {}} From 44df6814d490190f34ed3d15d46b932fb8d51a25 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Dec 2015 11:01:41 +0100 Subject: [PATCH 005/322] fix imports --- .../admin/cluster/settings/ClusterUpdateSettingsRequest.java | 2 +- .../cluster/settings/ClusterUpdateSettingsRequestBuilder.java | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index 1f6bf871101..0090d7db057 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; -import java.util.*; +import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; import 
static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java index 39ab0adc72f..f0492edfeb1 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java @@ -23,8 +23,6 @@ import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.settings.Settings; -import java.util.Arrays; -import java.util.Collection; import java.util.Map; /** From dfb8bf658bd9dc4722d667e5bb710c915cda43c7 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Dec 2015 15:22:19 +0100 Subject: [PATCH 006/322] expose default settings on `/_cluster/settings?defaults=true` --- .../common/settings/ClusterSettings.java | 15 +++++ .../RestClusterGetSettingsAction.java | 46 ++++++++++----- .../common/settings/ClusterSettingsTests.java | 57 +++++++++++++++++++ 3 files changed, 104 insertions(+), 14 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 220f2c5a164..5fd0b60dbcd 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -109,6 +109,20 @@ public final class ClusterSettings { return defaults; } + /** + * Returns a settings object that contains all clustersettings that are not + * already set in the given source. 
+ */ + public Settings diff(Settings source) { + Settings.Builder builder = Settings.builder(); + for (Setting setting : keySettings.values()) { + if (setting.exists(source) == false) { + builder.put(setting.getKey(), setting.getRaw(source)); + } + } + return builder.build(); + } + public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, @@ -167,4 +181,5 @@ public final class ClusterSettings { TransportService.TRACE_LOG_INCLUDE_SETTING, TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING))); + } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index a1cfdb48ddb..fe87edab4f4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -23,19 +23,27 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestBuilderListener; +import java.io.IOException; + /** */ public class RestClusterGetSettingsAction extends BaseRestHandler { + private final ClusterSettings clusterSettings; + @Inject - public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client) { + public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client, ClusterSettings clusterSettings) { super(settings, controller, client); + this.clusterSettings = clusterSettings; controller.registerHandler(RestRequest.Method.GET, "/_cluster/settings", this); } @@ -44,24 +52,34 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest() .routingTable(false) .nodes(false); + final boolean renderDefaults = request.paramAsBoolean("defaults", false); clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local())); client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(ClusterStateResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - - builder.startObject("persistent"); - response.getState().metaData().persistentSettings().toXContent(builder, request); - builder.endObject(); - - builder.startObject("transient"); - response.getState().metaData().transientSettings().toXContent(builder, request); - builder.endObject(); - - builder.endObject(); - - return new BytesRestResponse(RestStatus.OK, builder); + return new 
BytesRestResponse(RestStatus.OK, renderResponse(clusterSettings, response.getState(), renderDefaults, builder, request)); } }); } + + private static XContentBuilder renderResponse(ClusterSettings settings, ClusterState state, boolean renderDefaults, XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + builder.startObject("persistent"); + state.metaData().persistentSettings().toXContent(builder, params); + builder.endObject(); + + builder.startObject("transient"); + state.metaData().transientSettings().toXContent(builder, params); + builder.endObject(); + + if (renderDefaults) { + builder.startObject("defaults"); + settings.diff(state.metaData().settings()).toXContent(builder, params); + builder.endObject(); + } + + builder.endObject(); + return builder; + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java new file mode 100644 index 00000000000..a504e0ad727 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; + +/** + */ +public class ClusterSettingsTests extends ESTestCase { + + public void testGet() { + ClusterSettings settings = new ClusterSettings(); + Setting setting = settings.get("cluster.routing.allocation.require.value"); + assertEquals(setting, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING); + + setting = settings.get("cluster.routing.allocation.total_shards_per_node"); + assertEquals(setting, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING); + } + + public void testIsDynamic(){ + ClusterSettings settings = new ClusterSettings(new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster)))); + assertFalse(settings.hasDynamicSetting("foo.bar.baz")); + assertTrue(settings.hasDynamicSetting("foo.bar")); + assertNotNull(settings.get("foo.bar.baz")); + } + + public void testDiff() throws IOException { + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster); + Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); + ClusterSettings settings = new ClusterSettings(new HashSet<>(Arrays.asList(foobar, foobarbaz))); + Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build()); + assertEquals(diff.getAsMap().size(), 1); + assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(1)); + } +} From 5d7f4ef3943dcbd14e85021075f59c9b7cc7b262 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Dec 2015 15:27:11 +0100 Subject: [PATCH 007/322] include default settigns --- .../common/settings/ClusterSettings.java | 18 ++++-------------- .../settings/RestClusterGetSettingsAction.java | 6 +++--- .../common/settings/ClusterSettingsTests.java | 6 +++++- 3 files changed, 12 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 5fd0b60dbcd..5392fe47529 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -48,10 +48,8 @@ public final class ClusterSettings { private final Map> groupSettings = new HashMap<>(); private final Map> keySettings = new HashMap<>(); - private final Settings defaults; public ClusterSettings(Set> settingsSet) { - Settings.Builder builder = Settings.builder(); for (Setting entry : settingsSet) { if (entry.getScope() != Setting.Scope.Cluster) { throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); @@ -61,9 +59,7 @@ public final class ClusterSettings { } else { keySettings.put(entry.getKey(), entry); } - builder.put(entry.getKey(), entry.getDefault(Settings.EMPTY)); } - this.defaults = builder.build(); } public ClusterSettings() { @@ -102,22 +98,16 @@ public final class ClusterSettings { return key.startsWith("logger."); } - /** - * Returns the cluster settings defaults - */ - public Settings getDefaults() { - return defaults; - } - /** * Returns a settings object that contains all clustersettings that are not - * already 
set in the given source. + * already set in the given source. The diff contains either the default value for each + * setting or the settings value in the given default settings. */ - public Settings diff(Settings source) { + public Settings diff(Settings source, Settings defaultSettings) { Settings.Builder builder = Settings.builder(); for (Setting setting : keySettings.values()) { if (setting.exists(source) == false) { - builder.put(setting.getKey(), setting.getRaw(source)); + builder.put(setting.getKey(), setting.getRaw(defaultSettings)); } } return builder.build(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index fe87edab4f4..b7b5064c096 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -57,12 +57,12 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(ClusterStateResponse response, XContentBuilder builder) throws Exception { - return new BytesRestResponse(RestStatus.OK, renderResponse(clusterSettings, response.getState(), renderDefaults, builder, request)); + return new BytesRestResponse(RestStatus.OK, renderResponse(response.getState(), renderDefaults, builder, request)); } }); } - private static XContentBuilder renderResponse(ClusterSettings settings, ClusterState state, boolean renderDefaults, XContentBuilder builder, ToXContent.Params params) throws IOException { + private XContentBuilder renderResponse(ClusterState state, boolean renderDefaults, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.startObject("persistent"); @@ -75,7 +75,7 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { if (renderDefaults) { builder.startObject("defaults"); - settings.diff(state.metaData().settings()).toXContent(builder, params); + clusterSettings.diff(state.metaData().settings(), this.settings).toXContent(builder, params); builder.endObject(); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java index a504e0ad727..28a639e8903 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java @@ -50,8 +50,12 @@ public class ClusterSettingsTests extends ESTestCase { Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster); Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); ClusterSettings settings = new ClusterSettings(new HashSet<>(Arrays.asList(foobar, foobarbaz))); - Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build()); + Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(diff.getAsMap().size(), 1); assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(1)); + + diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).build()); + assertEquals(diff.getAsMap().size(), 1); + 
assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(17)); } } From 6c7e5069d4aa841646e3069466fa93c945accfc8 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Dec 2015 21:50:24 +0100 Subject: [PATCH 008/322] apply review comments from @nik9000 --- .../allocator/BalancedShardsAllocator.java | 30 ++--- .../decider/DiskThresholdDecider.java | 2 +- .../common/settings/ClusterSettings.java | 114 +++++++++--------- .../common/settings/Setting.java | 30 ++++- .../common/settings/SettingsService.java | 18 +-- .../HierarchyCircuitBreakerService.java | 4 +- .../common/unit/TimeValueTests.java | 14 +-- 7 files changed, 108 insertions(+), 104 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index e99e5c84ecb..d3916692898 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -74,18 +74,12 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; */ public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { + public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, Setting.Scope.Cluster); + public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, Setting.Scope.Cluster); + public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, Setting.Scope.Cluster); - public static final float DEFAULT_INDEX_BALANCE_FACTOR = 0.55f; - public static final float DEFAULT_SHARD_BALANCE_FACTOR = 0.45f; - public static final float DEFAULT_THRESHOLD = 1.0f; - - public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", DEFAULT_INDEX_BALANCE_FACTOR, true, Setting.Scope.Cluster); - public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", DEFAULT_SHARD_BALANCE_FACTOR, true, Setting.Scope.Cluster); - public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", DEFAULT_THRESHOLD, true, Setting.Scope.Cluster); - - private volatile WeightFunction weightFunction = new WeightFunction(DEFAULT_INDEX_BALANCE_FACTOR, DEFAULT_SHARD_BALANCE_FACTOR); - - private volatile float threshold = DEFAULT_THRESHOLD; + private volatile WeightFunction weightFunction; + private volatile float threshold; public BalancedShardsAllocator(Settings settings) { this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); @@ -94,26 +88,18 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards @Inject public BalancedShardsAllocator(Settings settings, ClusterSettingsService clusterSettingsService) { super(settings); - setIndexBalance(INDEX_BALANCE_FACTOR_SETTING.get(settings)); - setShardBalance(SHARD_BALANCE_FACTOR_SETTING.get(settings)); + weightFunction = new WeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); setThreshold(THRESHOLD_SETTING.get(settings)); - 
clusterSettingsService.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, this::setIndexBalance); - clusterSettingsService.addSettingsUpdateConsumer(SHARD_BALANCE_FACTOR_SETTING, this::setShardBalance); + clusterSettingsService.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); clusterSettingsService.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); } - public void setIndexBalance(float indexBalance) { + public void setWeightFunction(float indexBalance, float shardBalanceFactor) { weightFunction = new WeightFunction(indexBalance, weightFunction.shardBalance); } - public void setShardBalance(float shardBalanceFactor) { - weightFunction = new WeightFunction(weightFunction.indexBalance, shardBalanceFactor); - } public void setThreshold(float threshold) { - if (threshold <= 0.0f) { - throw new IllegalArgumentException("threshold must be greater than 0.0f but was: " + threshold); - } this.threshold = threshold; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 40a8442ca3a..abfc3d13018 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -84,7 +84,7 @@ public class DiskThresholdDecider extends AllocationDecider { public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.Cluster); public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "_na_", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "_na_", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "_na_", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.Cluster); public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.Cluster);; public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 5392fe47529..7c5da7c5074 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -114,62 +114,62 @@ public final class ClusterSettings { } public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new 
HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, - AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, - BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, - BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, - BalancedShardsAllocator.THRESHOLD_SETTING, - ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, - ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, - EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, - EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, - ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING, - FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, - FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, - FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, - IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, - IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, - IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, - MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, - MetaData.SETTING_READ_ONLY_SETTING, - RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, - RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS_SETTING, - RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, - RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING, - RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, - RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, - RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, - RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, - RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, - RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, - ThreadPool.THREADPOOL_GROUP_SETTING, - ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, - ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, - SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, - DestructiveOperations.REQUIRES_NAME_SETTING, - DiscoverySettings.PUBLISH_TIMEOUT_SETTING, - DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, - DiscoverySettings.COMMIT_TIMEOUT_SETTING, - DiscoverySettings.NO_MASTER_BLOCK_SETTING, - HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, - HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, - InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, - 
SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, - ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, - TransportService.TRACE_LOG_EXCLUDE_SETTING, - TransportService.TRACE_LOG_INCLUDE_SETTING, - TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, - ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING))); + AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, + BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.THRESHOLD_SETTING, + ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, + ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, + ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, + IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, + MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, + MetaData.SETTING_READ_ONLY_SETTING, + RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, + RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS_SETTING, + RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, + RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING, + RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, + RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, + RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, + ThreadPool.THREADPOOL_GROUP_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, + SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, + DestructiveOperations.REQUIRES_NAME_SETTING, + DiscoverySettings.PUBLISH_TIMEOUT_SETTING, + DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, + DiscoverySettings.COMMIT_TIMEOUT_SETTING, + DiscoverySettings.NO_MASTER_BLOCK_SETTING, + HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, + 
HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, + InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, + TransportService.TRACE_LOG_EXCLUDE_SETTING, + TransportService.TRACE_LOG_INCLUDE_SETTING, + TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, + ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 169867ef384..ed635133853 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -241,7 +241,6 @@ public class Setting extends ToXContentToBytes { if (accept.test(inst) == false) { throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + getRaw(settings) + "]"); } - logger.info("update [{}] from [{}] to [{}]", key, value, getRaw(settings)); pendingValue = newValue; valueInstance = inst; commitPending = true; @@ -254,6 +253,7 @@ public class Setting extends ToXContentToBytes { public void apply() { if (commitPending) { + logger.info("update [{}] from [{}] to [{}]", key, value, pendingValue); value = pendingValue; consumer.accept(valueInstance); } @@ -283,6 +283,16 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, "_na_", (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); } + public static Setting floatSetting(String key, float defaultValue, float minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Float.toString(defaultValue), (s) -> { + float value = Float.parseFloat(s); + if (value < minValue) { + throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return value; + }, dynamic, scope); + } + public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { return new Setting<>(key, "_na_", (s) -> Integer.toString(defaultValue), Integer::parseInt, dynamic, scope); } @@ -304,7 +314,9 @@ public class Setting extends ToXContentToBytes { } public static Setting groupSetting(String key, boolean dynamic, Scope scope) { - String prefix = key.endsWith(".") ? 
key : key + "."; + if (key.endsWith(".") == false) { + throw new IllegalArgumentException("key must end with a '.'"); + } return new Setting(key, "_na_", "", (s) -> null, dynamic, scope) { @Override @@ -314,12 +326,12 @@ public class Setting extends ToXContentToBytes { @Override public Settings get(Settings settings) { - return settings.getByPrefix(prefix); + return settings.getByPrefix(key); } @Override public boolean match(String toTest) { - return Regex.simpleMatch(prefix + "*", toTest); + return Regex.simpleMatch(key + "*", toTest); } @Override @@ -387,8 +399,14 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, "_na_", (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope); } - public static Setting nonNegativeDouble(String key, double defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Double.toString(defaultValue), Double::parseDouble, dynamic, scope); + public static Setting doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, "_na_", (s) -> Double.toString(defaultValue), (s) -> { + final double d = Double.parseDouble(s); + if (d < minValue) { + throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return d; + }, dynamic, scope); } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java index b1a1319e800..91ecd844190 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java @@ -66,7 +66,7 @@ public abstract class SettingsService extends AbstractComponent { try { settingUpdater.rollback(); } catch (Exception e) { - logger.warn("failed to rollback settings for [{}]", e, settingUpdater); + logger.error("failed to rollback settings for [{}]", e, settingUpdater); } } } @@ -77,15 +77,15 @@ public abstract class SettingsService extends AbstractComponent { * Applies the given settings to all the settings consumers or to none of them. The settings * will be merged with the node settings before they are applied while given settings override existing node * settings. 
- * @param settings the settings to apply + * @param newSettings the settings to apply * @return the unmerged applied settings */ - public synchronized Settings applySettings(Settings settings) { - if (lastSettingsApplied != null && settings.equals(lastSettingsApplied)) { + public synchronized Settings applySettings(Settings newSettings) { + if (lastSettingsApplied != null && newSettings.equals(lastSettingsApplied)) { // nothing changed in the settings, ignore - return settings; + return newSettings; } - final Settings build = Settings.builder().put(this.settings).put(settings).build(); + final Settings build = Settings.builder().put(this.settings).put(newSettings).build(); boolean success = false; try { for (SettingUpdater settingUpdater : settingUpdaters) { @@ -109,14 +109,14 @@ public abstract class SettingsService extends AbstractComponent { try { settingUpdater.rollback(); } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, settingUpdater); + logger.error("failed to refresh settings for [{}]", e, settingUpdater); } } } } try { - for (Map.Entry entry : settings.getAsMap().entrySet()) { + for (Map.Entry entry : newSettings.getAsMap().entrySet()) { if (entry.getKey().startsWith("logger.")) { String component = entry.getKey().substring("logger.".length()); if ("_root".equals(component)) { @@ -130,7 +130,7 @@ public abstract class SettingsService extends AbstractComponent { logger.warn("failed to refresh settings for [{}]", e, "logger"); } - return lastSettingsApplied = settings; + return lastSettingsApplied = newSettings; } /** diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index e3837fb391c..8e4886f4ac9 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -49,11 +49,11 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.Cluster); public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.Cluster); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.nonNegativeDouble("indices.breaker.fielddata.overhead", 1.03d, true, Setting.Scope.Cluster); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.Cluster); public static final String FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.fielddata.type"; public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.Cluster); - public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.nonNegativeDouble("indices.breaker.request.overhead", 1.0d, true, Setting.Scope.Cluster); + public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.Cluster); public static final String REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.request.type"; public static final String DEFAULT_BREAKER_TYPE = "memory"; diff --git 
a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index 4220973ae21..2945d86fe59 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -164,12 +164,12 @@ public class TimeValueTests extends ESTestCase { } public void testToStringRep() { - assertThat("-1", equalTo(new TimeValue(-1).getStringRep())); - assertThat("10ms", equalTo(new TimeValue(10, TimeUnit.MILLISECONDS).getStringRep())); - assertThat("1533ms", equalTo(new TimeValue(1533, TimeUnit.MILLISECONDS).getStringRep())); - assertThat("90s", equalTo(new TimeValue(90, TimeUnit.SECONDS).getStringRep())); - assertThat("90m", equalTo(new TimeValue(90, TimeUnit.MINUTES).getStringRep())); - assertThat("36h", equalTo(new TimeValue(36, TimeUnit.HOURS).getStringRep())); - assertThat("1000d", equalTo(new TimeValue(1000, TimeUnit.DAYS).getStringRep())); + assertEquals("-1", new TimeValue(-1).getStringRep()); + assertEquals("10ms", new TimeValue(10, TimeUnit.MILLISECONDS).getStringRep()); + assertEquals("1533ms", new TimeValue(1533, TimeUnit.MILLISECONDS).getStringRep()); + assertEquals("90s", new TimeValue(90, TimeUnit.SECONDS).getStringRep()); + assertEquals("90m", new TimeValue(90, TimeUnit.MINUTES).getStringRep()); + assertEquals("36h", new TimeValue(36, TimeUnit.HOURS).getStringRep()); + assertEquals("1000d", new TimeValue(1000, TimeUnit.DAYS).getStringRep()); } } From edd98b0e6cdf3bd2231ff6cbe503c19fe3b829c0 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Dec 2015 22:15:08 +0100 Subject: [PATCH 009/322] remove descritpion for now --- .../decider/AwarenessAllocationDecider.java | 2 +- .../ClusterRebalanceAllocationDecider.java | 6 +-- .../decider/DiskThresholdDecider.java | 4 +- .../decider/EnableAllocationDecider.java | 4 +- .../decider/ThrottlingAllocationDecider.java | 2 +- .../common/settings/Setting.java | 38 +++++++------------ .../discovery/DiscoverySettings.java | 4 +- .../index/store/IndexStoreConfig.java | 2 +- .../transport/TransportService.java | 4 +- .../common/settings/SettingTests.java | 4 +- 10 files changed, 30 insertions(+), 40 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 450b953fd38..624367003fa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -78,7 +78,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "_na_", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.Cluster); private String[] awarenessAttributes; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index f20489a795e..fe481bcabca 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -37,10 +37,10 @@ import java.util.Locale; *
    *
  • indices_primaries_active - Re-balancing is allowed only once all * primary shards on all indices are active.
  • - * + * *
  • indices_all_active - Re-balancing is allowed only once all * shards on all indices are active.
  • - * + * *
  • always - Re-balancing is allowed once a shard replication group * is active
  • *
@@ -48,7 +48,7 @@ import java.util.Locale; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", "_na_", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.Cluster); /** * An enum representation for the configured re-balance type. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index abfc3d13018..bdbb9a16902 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -83,8 +83,8 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile TimeValue rerouteInterval; public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "_na_", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "_na_", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.Cluster); public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.Cluster);; public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 38183fab830..b32b953441a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -59,10 +59,10 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", "_na_", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.Cluster); public static final String INDEX_ROUTING_ALLOCATION_ENABLE= "index.routing.allocation.enable"; - public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", "_na_", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.Cluster); public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable"; private volatile Rebalance enableRebalance; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 543f0bf780b..7f40667fbf5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -53,7 +53,7 @@ public class ThrottlingAllocationDecider extends AllocationDecider { public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", "_na_", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.Cluster); private volatile int primariesInitialRecoveries; private volatile int concurrentRecoveries; diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index ed635133853..1230936b6d0 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -39,15 +39,13 @@ import java.util.function.Predicate; */ public class Setting extends ToXContentToBytes { private final String key; - private final String 
description; private final Function defaultValue; private final Function parser; private final boolean dynamic; private final Scope scope; - public Setting(String key, String description, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { this.key = key; - this.description = description; this.defaultValue = defaultValue; this.parser = parser; this.dynamic = dynamic; @@ -62,13 +60,6 @@ public class Setting extends ToXContentToBytes { return key; } - /** - * Returns a human readable description of this setting - */ - public String getDescription() { - return description; - } - /** * Returns true iff this setting is dynamically updateable, otherwise false */ @@ -117,7 +108,7 @@ public class Setting extends ToXContentToBytes { return parser.apply(value); } catch (ElasticsearchParseException ex) { throw ex; - } catch (Throwable t) { + } catch (Exception t) { throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", t); } } @@ -143,7 +134,6 @@ public class Setting extends ToXContentToBytes { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("key", key); - builder.field("description", description); builder.field("type", scope.name()); builder.field("dynamic", dynamic); builder.field("default", defaultValue.apply(Settings.EMPTY)); @@ -275,16 +265,16 @@ public class Setting extends ToXContentToBytes { } - public Setting(String key, String description, String defaultValue, Function parser, boolean dynamic, Scope scope) { - this(key, description, (s) -> defaultValue, parser, dynamic, scope); + public Setting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope) { + this(key, (s) -> defaultValue, parser, dynamic, scope); } public static Setting floatSetting(String key, float defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); + return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); } public static Setting floatSetting(String key, float defaultValue, float minValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Float.toString(defaultValue), (s) -> { + return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> { float value = Float.parseFloat(s); if (value < minValue) { throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); @@ -294,19 +284,19 @@ public class Setting extends ToXContentToBytes { } public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Integer.toString(defaultValue), Integer::parseInt, dynamic, scope); + return new Setting<>(key, (s) -> Integer.toString(defaultValue), Integer::parseInt, dynamic, scope); } public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); } public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> 
percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); + return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); } public static Setting byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> value.toString(), (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope); + return new Setting<>(key, (s) -> value.toString(), (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope); } public static Setting positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { @@ -317,7 +307,7 @@ public class Setting extends ToXContentToBytes { if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); } - return new Setting(key, "_na_", "", (s) -> null, dynamic, scope) { + return new Setting(key, "", (s) -> null, dynamic, scope) { @Override public boolean isGroupSetting() { @@ -382,7 +372,7 @@ public class Setting extends ToXContentToBytes { } public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", defaultValue, (s) -> { + return new Setting<>(key, defaultValue, (s) -> { TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); if (timeValue.millis() < minValue.millis()) { throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); @@ -396,11 +386,11 @@ public class Setting extends ToXContentToBytes { } public static Setting timeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope); + return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope); } public static Setting doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, Scope scope) { - return new Setting<>(key, "_na_", (s) -> Double.toString(defaultValue), (s) -> { + return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> { final double d = Double.parseDouble(s); if (d < minValue) { throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 829a173060d..745c4760d54 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -46,8 +46,8 @@ public class DiscoverySettings extends AbstractComponent { * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing * to receive responses within this window will cause the cluster state change to be rejected. 
*/ - public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", "_na_", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.Cluster); - public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "_na_", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.Cluster); + public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.Cluster); + public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.Cluster); public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.Cluster); public final static int NO_MASTER_BLOCK_ID = 2; diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 48f24613f6f..0588dafd310 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -36,7 +36,7 @@ public class IndexStoreConfig{ /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ - public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", "_na_", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.Cluster); + public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.Cluster); /** * Configures the node / cluster level throttle intensity. 
The default is 10240 MB */ diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index f9cbb012b2a..d0d5b5e230b 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -87,8 +87,8 @@ public class TransportService extends AbstractLifecycleComponent TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "_na_", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); - public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "_na_", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster);; + public static final Setting TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); + public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster);; private final ESLogger tracerLog; diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 08ec33d0b43..5787ae59f56 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -113,14 +113,14 @@ public class SettingTests extends ESTestCase { assertEquals(defautlValue.millis() + "ms", aDefault); assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); - Setting secondaryDefault = new Setting<>("foo.bar", "_na_", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.Cluster); + Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.Cluster); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = new Setting<>("foo.bar", "", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.Cluster); + Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.Cluster); assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); From c9d7c922437afb15aecd9b47230f76440eef1c1b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 09:57:39 +0100 Subject: [PATCH 010/322] fold ClusterSettingsService into ClusterSettings --- .../cluster/settings/SettingsUpdater.java | 13 ++-- .../TransportClusterUpdateSettingsAction.java | 10 +-- .../close/TransportCloseIndexAction.java | 6 +- .../delete/TransportDeleteIndexAction.java | 4 +- .../open/TransportOpenIndexAction.java | 3 +- .../action/support/DestructiveOperations.java | 6 +- .../elasticsearch/cluster/ClusterModule.java | 5 +- .../cluster/InternalClusterInfoService.java | 10 +-- .../action/index/MappingUpdatedAction.java | 6 +- .../allocator/BalancedShardsAllocator.java | 9 +-- .../decider/AwarenessAllocationDecider.java | 9 +-- .../ClusterRebalanceAllocationDecider.java | 6 +- 
.../ConcurrentRebalanceAllocationDecider.java | 6 +- .../decider/DiskThresholdDecider.java | 15 ++-- .../decider/EnableAllocationDecider.java | 8 +- .../decider/FilterAllocationDecider.java | 10 +-- .../decider/ShardsLimitAllocationDecider.java | 6 +- .../SnapshotInProgressAllocationDecider.java | 7 +- .../decider/ThrottlingAllocationDecider.java | 8 +- .../service/InternalClusterService.java | 12 +-- ...rvice.java => AbstractScopedSettings.java} | 74 ++++++++++++++++--- .../common/settings/ClusterSettings.java | 64 +--------------- .../settings/ClusterSettingsService.java | 53 ------------- .../common/settings/Setting.java | 18 ++--- .../discovery/DiscoverySettings.java | 12 +-- .../discovery/zen/ZenDiscovery.java | 8 +- .../elasticsearch/indices/IndicesService.java | 8 +- .../HierarchyCircuitBreakerService.java | 10 +-- .../indices/recovery/RecoverySettings.java | 28 +++---- .../indices/ttl/IndicesTTLService.java | 6 +- .../java/org/elasticsearch/node/Node.java | 5 +- .../org/elasticsearch/node/NodeModule.java | 1 - .../elasticsearch/search/SearchService.java | 6 +- .../snapshots/RestoreService.java | 9 +-- .../elasticsearch/threadpool/ThreadPool.java | 6 +- .../transport/TransportService.java | 8 +- .../settings/SettingsUpdaterTests.java | 8 +- .../cluster/ClusterModuleTests.java | 5 +- .../allocation/BalanceConfigurationTests.java | 5 +- .../DiskThresholdDeciderUnitTests.java | 7 +- .../decider/EnableAllocationTests.java | 13 ++-- .../breaker/MemoryCircuitBreakerTests.java | 5 +- .../common/settings/ClusterSettingsTests.java | 61 --------------- ...iceTests.java => ScopedSettingsTests.java} | 61 +++++++++------ .../common/settings/SettingTests.java | 16 ++-- .../common/util/BigArraysTests.java | 5 +- .../zen/NodeJoinControllerTests.java | 3 +- .../PublishClusterStateActionTests.java | 3 +- .../breaker/CircuitBreakerUnitTests.java | 3 +- .../recovery/RecoverySourceHandlerTests.java | 3 +- .../UpdateThreadPoolSettingsTests.java | 47 ++++++------ .../AbstractSimpleTransportTestCase.java | 3 +- .../NettySizeHeaderFrameDecoderTests.java | 3 +- .../discovery/azure/AzureDiscovery.java | 1 - .../discovery/ec2/Ec2Discovery.java | 1 - .../discovery/gce/GceDiscovery.java | 1 - .../MockInternalClusterInfoService.java | 6 +- .../search/MockSearchService.java | 8 +- .../test/ESAllocationTestCase.java | 17 ++--- 59 files changed, 313 insertions(+), 447 deletions(-) rename core/src/main/java/org/elasticsearch/common/settings/{SettingsService.java => AbstractScopedSettings.java} (75%) delete mode 100644 core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java delete mode 100644 core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java rename core/src/test/java/org/elasticsearch/common/settings/{SettingsServiceTests.java => ScopedSettingsTests.java} (65%) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java index a6b61844a1e..f5020a46b37 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.ClusterSettingsService; import 
org.elasticsearch.common.settings.Settings; import java.util.HashSet; @@ -40,12 +39,10 @@ import static org.elasticsearch.cluster.ClusterState.builder; final class SettingsUpdater { final Settings.Builder transientUpdates = Settings.settingsBuilder(); final Settings.Builder persistentUpdates = Settings.settingsBuilder(); - private final ClusterSettings dynamicSettings; - private final ClusterSettingsService clusterSettingsService; + private final ClusterSettings clusterSettings; - SettingsUpdater(ClusterSettingsService clusterSettingsService) { - this.dynamicSettings = clusterSettingsService.getClusterSettings(); - this.clusterSettingsService = clusterSettingsService; + SettingsUpdater(ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; } synchronized Settings getTransientUpdates() { @@ -85,7 +82,7 @@ final class SettingsUpdater { Settings settings = build.metaData().settings(); // now we try to apply things and if they are invalid we fail // this dryRun will validate & parse settings but won't actually apply them. - clusterSettingsService.dryRun(settings); + clusterSettings.dryRun(settings); return build; } @@ -96,7 +93,7 @@ final class SettingsUpdater { for (Map.Entry entry : toApply.getAsMap().entrySet()) { if (entry.getValue() == null) { toRemove.add(entry.getKey()); - } else if (dynamicSettings.isLoggerSetting(entry.getKey()) || dynamicSettings.hasDynamicSetting(entry.getKey())) { + } else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) { settingsBuilder.put(entry.getKey(), entry.getValue()); updates.put(entry.getKey(), entry.getValue()); changed = true; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 9170b9168b1..99815b77ff7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -36,8 +36,8 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -50,14 +50,14 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct private final AllocationService allocationService; - private final ClusterSettingsService clusterSettingsService; + private final ClusterSettings clusterSettings; @Inject public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - AllocationService allocationService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettingsService clusterSettingsService) { + AllocationService allocationService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettings clusterSettings) { super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, 
indexNameExpressionResolver, ClusterUpdateSettingsRequest::new); this.allocationService = allocationService; - this.clusterSettingsService = clusterSettingsService; + this.clusterSettings = clusterSettings; } @Override @@ -83,7 +83,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener listener) { - final SettingsUpdater updater = new SettingsUpdater(clusterSettingsService); + final SettingsUpdater updater = new SettingsUpdater(clusterSettings); clusterService.submitStateUpdateTask("cluster_update_settings", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 994fdcced1f..57c9fdbf0da 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -31,9 +31,9 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -50,13 +50,13 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction(clusterDynamicSettings.values()))); - bind(ClusterSettingsService.class).toInstance(clusterSettingsService); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); + bind(ClusterSettings.class).toInstance(clusterSettings); } diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 13fe2472bd4..377d6578ac1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -37,12 +37,12 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ReceiveTimeoutTransportException; @@ -83,7 +83,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu private final List listeners = new CopyOnWriteArrayList<>(); @Inject - public InternalClusterInfoService(Settings settings, ClusterSettingsService 
clusterSettingsService, + public InternalClusterInfoService(Settings settings, ClusterSettings clusterSettings, TransportNodesStatsAction transportNodesStatsAction, TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { @@ -99,9 +99,9 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu this.updateFrequency = INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.get(settings); this.fetchTimeout = INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.get(settings); this.enabled = DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, this::setFetchTimeout); - clusterSettingsService.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, this::setUpdateFrequency); - clusterSettingsService.addSettingsUpdateConsumer(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); + clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, this::setFetchTimeout); + clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, this::setUpdateFrequency); + clusterSettings.addSettingsUpdateConsumer(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); // Add InternalClusterInfoService to listen for Master changes this.clusterService.add((LocalNodeMasterListener)this); diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 04fea06138e..7df0e24210e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -26,12 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.concurrent.TimeoutException; @@ -47,10 +47,10 @@ public class MappingUpdatedAction extends AbstractComponent { private volatile TimeValue dynamicMappingUpdateTimeout; @Inject - public MappingUpdatedAction(Settings settings, ClusterSettingsService clusterSettingsService) { + public MappingUpdatedAction(Settings settings, ClusterSettings clusterSettings) { super(settings); this.dynamicMappingUpdateTimeout = INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout); } private void setDynamicMappingUpdateTimeout(TimeValue dynamicMappingUpdateTimeout) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index d3916692898..f52b3228c47 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.ArrayList; import java.util.Collection; @@ -82,16 +81,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private volatile float threshold; public BalancedShardsAllocator(Settings settings) { - this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public BalancedShardsAllocator(Settings settings, ClusterSettingsService clusterSettingsService) { + public BalancedShardsAllocator(Settings settings, ClusterSettings clusterSettings) { super(settings); weightFunction = new WeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); setThreshold(THRESHOLD_SETTING.get(settings)); - clusterSettingsService.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); - clusterSettingsService.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); + clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); + clusterSettings.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); } public void setWeightFunction(float indexBalance, float shardBalanceFactor) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 624367003fa..148fdd82f35 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.HashMap; import java.util.Map; @@ -98,16 +97,16 @@ public class AwarenessAllocationDecider extends AllocationDecider { * @param settings {@link Settings} to use */ public AwarenessAllocationDecider(Settings settings) { - this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public AwarenessAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); - 
clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes); setForcedAwarenessAttributes(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.get(settings)); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, this::setForcedAwarenessAttributes); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, this::setForcedAwarenessAttributes); } private void setForcedAwarenessAttributes(Settings forceSettings) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index fe481bcabca..036695bee46 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -22,9 +22,9 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Locale; @@ -82,7 +82,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { private ClusterRebalanceType type; @Inject - public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); try { type = CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.get(settings); @@ -92,7 +92,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { } logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, type.toString().toLowerCase(Locale.ROOT)); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType); } public void setType(ClusterRebalanceType type) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 040c4481951..7843a31ff91 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -22,9 +22,9 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; /** * Similar to the {@link ClusterRebalanceAllocationDecider} this @@ -46,11 +46,11 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { private volatile int clusterConcurrentRebalance; @Inject - public ConcurrentRebalanceAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public ConcurrentRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.clusterConcurrentRebalance = CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.get(settings); logger.debug("using [cluster_concurrent_rebalance] with [{}]", clusterConcurrentRebalance); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, this::setClusterConcurrentRebalance); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, this::setClusterConcurrentRebalance); } public void setClusterConcurrentRebalance(int concurrentRebalance) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index bdbb9a16902..5ffaa1f0e84 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Set; @@ -191,11 +190,11 @@ public class DiskThresholdDecider extends AllocationDecider { // It's okay the Client is null here, because the empty cluster info // service will never actually call the listener where the client is // needed. 
Also this constructor is only used for tests - this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), EmptyClusterInfoService.INSTANCE, null); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), EmptyClusterInfoService.INSTANCE, null); } @Inject - public DiskThresholdDecider(Settings settings, ClusterSettingsService clusterSettingsService, ClusterInfoService infoService, Client client) { + public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings, ClusterInfoService infoService, Client client) { super(settings); final String lowWatermark = CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.get(settings); final String highWatermark = CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.get(settings); @@ -204,11 +203,11 @@ public class DiskThresholdDecider extends AllocationDecider { this.includeRelocations = CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.get(settings); this.rerouteInterval = CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.get(settings); this.enabled = CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, this::setLowWatermark); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, this::setHighWatermark); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, this::setIncludeRelocations); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, this::setRerouteInterval); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, this::setLowWatermark); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, this::setHighWatermark); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, this::setIncludeRelocations); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, this::setRerouteInterval); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); infoService.addListener(new DiskListener(client)); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index b32b953441a..61156de7137 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -23,9 +23,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.Locale; @@ -69,12 +69,12 @@ public class EnableAllocationDecider 
extends AllocationDecider { private volatile Allocation enableAllocation; @Inject - public EnableAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public EnableAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.enableAllocation = CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.get(settings); this.enableRebalance = CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, this::setEnableAllocation); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, this::setEnableRebalance); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, this::setEnableAllocation); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, this::setEnableRebalance); } public void setEnableRebalance(Rebalance enableRebalance) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index f9ee7ad8182..87ec158e8ca 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -25,9 +25,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; @@ -73,14 +73,14 @@ public class FilterAllocationDecider extends AllocationDecider { private volatile DiscoveryNodeFilters clusterExcludeFilters; @Inject - public FilterAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public FilterAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); setClusterRequireFilters(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.get(settings)); setClusterExcludeFilters(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.get(settings)); setClusterIncludeFilters(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.get(settings)); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, this::setClusterRequireFilters); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, this::setClusterExcludeFilters); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, this::setClusterIncludeFilters); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, this::setClusterRequireFilters); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, this::setClusterExcludeFilters); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, this::setClusterIncludeFilters); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index ecd03d92c70..3bc02879e61 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -25,9 +25,9 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; /** * This {@link AllocationDecider} limits the number of shards per node on a per @@ -68,10 +68,10 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Inject - public ShardsLimitAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public ShardsLimitAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.clusterShardLimit = CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, this::setClusterShardLimit); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, this::setClusterShardLimit); } private void setClusterShardLimit(int clusterShardLimit) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index fa447626e63..fdc65bf7ac8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; /** * This {@link org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider} prevents shards that @@ -57,14 +56,14 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { * @param settings {@link org.elasticsearch.common.settings.Settings} to use */ public SnapshotInProgressAllocationDecider(Settings settings) { - this(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public SnapshotInProgressAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public SnapshotInProgressAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); enableRelocation = CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, this::setEnableRelocation); + 
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, this::setEnableRelocation); } private void setEnableRelocation(boolean enableRelocation) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 7f40667fbf5..f7369f35dd9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -23,9 +23,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; /** * {@link ThrottlingAllocationDecider} controls the recovery process per node in @@ -59,13 +59,13 @@ public class ThrottlingAllocationDecider extends AllocationDecider { private volatile int concurrentRecoveries; @Inject - public ThrottlingAllocationDecider(Settings settings, ClusterSettingsService clusterSettingsService) { + public ThrottlingAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.primariesInitialRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.get(settings); this.concurrentRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.get(settings); logger.debug("using node_concurrent_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentRecoveries, primariesInitialRecoveries); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, this::setPrimariesInitialRecoveries); - clusterSettingsService.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, this::setConcurrentRecoveries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, this::setPrimariesInitialRecoveries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, this::setConcurrentRecoveries); } private void setConcurrentRecoveries(int concurrentRecoveries) { diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 56bf400d46b..945b9d1ea59 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; @@ -47,7 +48,6 @@ import org.elasticsearch.common.util.concurrent.*; import 
org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoveryService;
-import org.elasticsearch.common.settings.ClusterSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -75,7 +75,7 @@ public class InternalClusterService extends AbstractLifecycleComponent
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
--- a/core/src/main/java/org/elasticsearch/common/settings/SettingsService.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
-public abstract class SettingsService extends AbstractComponent {
+public abstract class AbstractScopedSettings extends AbstractComponent {
     private final List<SettingUpdater> settingUpdaters = new ArrayList<>();
+    private final Map<String, Setting<?>> groupSettings = new HashMap<>();
+    private final Map<String, Setting<?>> keySettings = new HashMap<>();
+    private final Setting.Scope scope;
 
-    protected SettingsService(Settings settings) {
+    protected AbstractScopedSettings(Settings settings, Set<Setting<?>> settingsSet, Setting.Scope scope) {
         super(settings);
+        for (Setting<?> entry : settingsSet) {
+            if (entry.getScope() != scope) {
+                throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope());
+            }
+            if (entry.isGroupSetting()) {
+                groupSettings.put(entry.getKey(), entry);
+            } else {
+                keySettings.put(entry.getKey(), entry);
+            }
+        }
+        this.scope = scope;
+    }
+
+    public Setting.Scope getScope() {
+        return this.scope;
     }
 
     /**
@@ -140,7 +156,7 @@ public abstract class SettingsService extends AbstractComponent {
      *
      */
     public synchronized <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer, Predicate<T> predicate) {
-        if (setting != getSetting(setting.getKey())) {
+        if (setting != get(setting.getKey())) {
             throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]");
         }
         this.settingUpdaters.add(setting.newUpdater(consumer, logger, settings, predicate));
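Concretely, the registration pattern this hunk enables looks like the following minimal sketch. The setting key my.plugin.flag and the class name are made up for illustration; the boolSetting(key, default, dynamic, scope) factory, the ClusterSettings constructor, and addSettingsUpdateConsumer follow the usages visible elsewhere in this patch:

    import java.util.HashSet;
    import java.util.Set;

    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class ConsumerRegistrationSketch {
        // Hypothetical dynamic cluster-level setting; boolSetting(key, default,
        // dynamic, scope) mirrors the call used in ClusterModuleTests below.
        static final Setting<Boolean> MY_FLAG =
                Setting.boolSetting("my.plugin.flag", false, true, Setting.Scope.Cluster);

        public static void main(String[] args) {
            // Every setting must be registered up front; addSettingsUpdateConsumer
            // rejects any setting the ClusterSettings instance does not know about.
            Set<Setting<?>> registered = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
            registered.add(MY_FLAG);
            ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, registered);

            // The predicate runs during validation; returning false (or throwing)
            // rejects the whole update before any consumer is invoked.
            clusterSettings.addSettingsUpdateConsumer(MY_FLAG,
                    value -> System.out.println("my.plugin.flag is now [" + value + "]"),
                    value -> true);
        }
    }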

@@ -153,10 +169,10 @@ public abstract class SettingsService extends AbstractComponent {
      *
      */
     public synchronized <A, B> void addSettingsUpdateConsumer(Setting<A> a, Setting<B> b, BiConsumer<A, B> consumer) {
-        if (a != getSetting(a.getKey())) {
+        if (a != get(a.getKey())) {
             throw new IllegalArgumentException("Setting is not registered for key [" + a.getKey() + "]");
         }
-        if (b != getSetting(b.getKey())) {
+        if (b != get(b.getKey())) {
             throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]");
         }
         this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger, settings));
@@ -172,8 +188,6 @@ public abstract class SettingsService extends AbstractComponent {
         addSettingsUpdateConsumer(setting, consumer, (s) -> true);
     }
 
-    protected abstract Setting<?> getSetting(String key);
-
     /**
      * Transactional interface to update settings.
      * @see Setting
@@ -198,4 +212,44 @@ public abstract class SettingsService extends AbstractComponent {
         void rollback();
     }
 
+    /**
+     * Returns the {@link Setting} for the given key or <code>null</code> if the setting cannot be found.
+     */
+    public Setting<?> get(String key) {
+        Setting<?> setting = keySettings.get(key);
+        if (setting == null) {
+            for (Map.Entry<String, Setting<?>> entry : groupSettings.entrySet()) {
+                if (entry.getValue().match(key)) {
+                    return entry.getValue();
+                }
+            }
+        } else {
+            return setting;
+        }
+        return null;
+    }
+
+    /**
+     * Returns <code>true</code> if the setting for the given key is dynamically updateable. Otherwise <code>false</code>.
+     */
+    public boolean hasDynamicSetting(String key) {
+        final Setting<?> setting = get(key);
+        return setting != null && setting.isDynamic();
+    }
+
+    /**
+     * Returns a settings object that contains all cluster settings that are not
+     * already set in the given source. The diff contains either the default value for each
+     * setting or the settings value in the given default settings.
+     */
+    public Settings diff(Settings source, Settings defaultSettings) {
+        Settings.Builder builder = Settings.builder();
+        for (Setting<?> setting : keySettings.values()) {
+            if (setting.exists(source) == false) {
+                builder.put(setting.getKey(), setting.getRaw(defaultSettings));
+            }
+        }
+        return builder.build();
+    }
+
 }
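To make the diff(...) contract above concrete, here is a small sketch. The setting key my.plugin.flag and the class name are hypothetical; the assertions only restate what the loop in diff(...) does, assuming getRaw falls back to the setting's default when defaultSettings carries no value:

    import java.util.Collections;

    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class SettingsDiffSketch {
        public static void main(String[] args) {
            Setting<Boolean> flag = Setting.boolSetting("my.plugin.flag", false, true, Setting.Scope.Cluster);
            ClusterSettings clusterSettings =
                    new ClusterSettings(Settings.EMPTY, Collections.<Setting<?>>singleton(flag));

            // Key absent from source: the diff carries the default value "false".
            Settings diff = clusterSettings.diff(Settings.EMPTY, Settings.EMPTY);
            assert "false".equals(diff.get("my.plugin.flag"));

            // Key already present in source: the diff omits it entirely.
            Settings source = Settings.builder().put("my.plugin.flag", true).build();
            assert clusterSettings.diff(source, Settings.EMPTY).get("my.plugin.flag") == null;
        }
    }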
diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
index 7c5da7c5074..bf9c24dad8d 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
@@ -26,8 +26,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
 import org.elasticsearch.cluster.routing.allocation.decider.*;
 import org.elasticsearch.cluster.service.InternalClusterService;
-import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.discovery.zen.ZenDiscovery;
 import org.elasticsearch.discovery.zen.elect.ElectMasterService;
@@ -44,51 +42,10 @@ import java.util.*;
 
 /**
  * Encapsulates all valid cluster level settings.
  */
-public final class ClusterSettings {
+public final class ClusterSettings extends AbstractScopedSettings {
 
-    private final Map<String, Setting<?>> groupSettings = new HashMap<>();
-    private final Map<String, Setting<?>> keySettings = new HashMap<>();
-
-    public ClusterSettings(Set<Setting<?>> settingsSet) {
-        for (Setting<?> entry : settingsSet) {
-            if (entry.getScope() != Setting.Scope.Cluster) {
-                throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope());
-            }
-            if (entry.isGroupSetting()) {
-                groupSettings.put(entry.getKey(), entry);
-            } else {
-                keySettings.put(entry.getKey(), entry);
-            }
-        }
-    }
-
-    public ClusterSettings() {
-        this(BUILT_IN_CLUSTER_SETTINGS);
-    }
-
-    /**
-     * Returns the {@link Setting} for the given key or <code>null</code> if the setting cannot be found.
-     */
-    public Setting<?> get(String key) {
-        Setting<?> setting = keySettings.get(key);
-        if (setting == null) {
-            for (Map.Entry<String, Setting<?>> entry : groupSettings.entrySet()) {
-                if (entry.getValue().match(key)) {
-                    return entry.getValue();
-                }
-            }
-        } else {
-            return setting;
-        }
-        return null;
-    }
-
-    /**
-     * Returns <code>true</code> if the setting for the given key is dynamically updateable. Otherwise <code>false</code>.
-     */
-    public boolean hasDynamicSetting(String key) {
-        final Setting<?> setting = get(key);
-        return setting != null && setting.isDynamic();
+    public ClusterSettings(Settings settings, Set<Setting<?>> settingsSet) {
+        super(settings, settingsSet, Setting.Scope.Cluster);
     }
 
     /**
@@ -98,20 +55,6 @@ public final class ClusterSettings {
         return key.startsWith("logger.");
     }
 
-    /**
-     * Returns a settings object that contains all cluster settings that are not
-     * already set in the given source. The diff contains either the default value for each
-     * setting or the settings value in the given default settings.
-     */
-    public Settings diff(Settings source, Settings defaultSettings) {
-        Settings.Builder builder = Settings.builder();
-        for (Setting<?> setting : keySettings.values()) {
-            if (setting.exists(source) == false) {
-                builder.put(setting.getKey(), setting.getRaw(defaultSettings));
-            }
-        }
-        return builder.build();
-    }
 
     public static Set<Setting<?>> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING,
     AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING,
@@ -171,5 +114,4 @@ public final class ClusterSettings {
     TransportService.TRACE_LOG_INCLUDE_SETTING,
     TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING,
     ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING)));
-
 }
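With ClusterSettings reduced to a registry handed to AbstractScopedSettings, lookups and updates go through one object. A short usage sketch, assuming the built-in set shown above; hasDynamicSetting and the transactional applySettings both appear in the tests further down, and the key string is the standard one for EnableAllocationDecider's enable setting:

    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Settings;

    public class ClusterSettingsUsageSketch {
        public static void main(String[] args) {
            ClusterSettings clusterSettings =
                    new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);

            // The registry answers whether a key is dynamically updateable.
            boolean dynamic = clusterSettings.hasDynamicSetting("cluster.routing.allocation.enable");
            System.out.println("dynamic? " + dynamic);

            // applySettings(...) is transactional: every value is validated first,
            // and if any one is rejected none of the registered consumers fire
            // (see testAllOrNothing in the settings-updater test below).
            clusterSettings.applySettings(Settings.builder()
                    .put("cluster.routing.allocation.enable", "none")
                    .build());
        }
    }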
diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java
deleted file mode 100644
index e66844d4e88..00000000000
--- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettingsService.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.settings;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.function.BiConsumer;
-import java.util.function.Consumer;
-import java.util.function.Predicate;
-
-/**
- * A service that allows to register for node settings change that can come from cluster
- * events holding new settings.
- */
-public final class ClusterSettingsService extends SettingsService {
-    private final ClusterSettings clusterSettings;
-
-    @Inject
-    public ClusterSettingsService(Settings settings, ClusterSettings clusterSettings) {
-        super(settings);
-        this.clusterSettings = clusterSettings;
-    }
-
-    protected Setting<?> getSetting(String key) {
-        return this.clusterSettings.get(key);
-    }
-
-    public ClusterSettings getClusterSettings() {
-        return clusterSettings;
-    }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index 1230936b6d0..fda10ea226b 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -149,11 +149,11 @@ public class Setting<T> extends ToXContentToBytes {
         Index;
     }
 
-    SettingsService.SettingUpdater newUpdater(Consumer<T> consumer, ESLogger logger, Settings settings) {
+    AbstractScopedSettings.SettingUpdater newUpdater(Consumer<T> consumer, ESLogger logger, Settings settings) {
         return newUpdater(consumer, logger, settings, (s) -> true);
     }
 
-    SettingsService.SettingUpdater newUpdater(Consumer<T> consumer, ESLogger logger, Settings settings, Predicate<T> accept) {
+    AbstractScopedSettings.SettingUpdater newUpdater(Consumer<T> consumer, ESLogger logger, Settings settings, Predicate<T> accept) {
         if (isDynamic()) {
             return new Updater(consumer, logger, settings, accept);
         } else {
@@ -161,12 +161,12 @@ public class Setting<T> extends ToXContentToBytes {
         }
     }
 
-    static <A, B> SettingsService.SettingUpdater compoundUpdater(final BiConsumer<A, B> consumer, final Setting<A> aSetting, final Setting<B> bSetting, ESLogger logger, Settings settings) {
+    static <A, B> AbstractScopedSettings.SettingUpdater compoundUpdater(final BiConsumer<A, B> consumer, final Setting<A> aSetting, final Setting<B> bSetting, ESLogger logger, Settings settings) {
         final AtomicReference<A> aRef = new AtomicReference<>();
         final AtomicReference<B> bRef = new AtomicReference<>();
-        final SettingsService.SettingUpdater aSettingUpdater = aSetting.newUpdater(aRef::set, logger, settings);
-        final SettingsService.SettingUpdater bSettingUpdater = bSetting.newUpdater(bRef::set, logger, settings);
-        return new SettingsService.SettingUpdater() {
+        final AbstractScopedSettings.SettingUpdater aSettingUpdater = aSetting.newUpdater(aRef::set, logger, settings);
+        final AbstractScopedSettings.SettingUpdater bSettingUpdater = bSetting.newUpdater(bRef::set, logger, settings);
+        return new AbstractScopedSettings.SettingUpdater() {
             boolean aHasChanged = false;
             boolean
bHasChanged = false; @Override @@ -204,7 +204,7 @@ public class Setting extends ToXContentToBytes { } - private class Updater implements SettingsService.SettingUpdater { + private class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final ESLogger logger; private final Predicate accept; @@ -325,12 +325,12 @@ public class Setting extends ToXContentToBytes { } @Override - public SettingsService.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { if (isDynamic() == false) { throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); } final Setting setting = this; - return new SettingsService.SettingUpdater() { + return new AbstractScopedSettings.SettingUpdater() { private Settings pendingSettings; private Settings committedSettings = get(settings); diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 745c4760d54..26902a70e58 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -23,10 +23,10 @@ import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.rest.RestStatus; import java.util.EnumSet; @@ -62,12 +62,12 @@ public class DiscoverySettings extends AbstractComponent { private volatile boolean publishDiff; @Inject - public DiscoverySettings(Settings settings, ClusterSettingsService clusterSettingsService) { + public DiscoverySettings(Settings settings, ClusterSettings clusterSettings) { super(settings); - clusterSettingsService.addSettingsUpdateConsumer(NO_MASTER_BLOCK_SETTING, this::setNoMasterBlock); - clusterSettingsService.addSettingsUpdateConsumer(PUBLISH_DIFF_ENABLE_SETTING, this::setPublishDiff); - clusterSettingsService.addSettingsUpdateConsumer(COMMIT_TIMEOUT_SETTING, this::setCommitTimeout); - clusterSettingsService.addSettingsUpdateConsumer(PUBLISH_TIMEOUT_SETTING, this::setPublishTimeout); + clusterSettings.addSettingsUpdateConsumer(NO_MASTER_BLOCK_SETTING, this::setNoMasterBlock); + clusterSettings.addSettingsUpdateConsumer(PUBLISH_DIFF_ENABLE_SETTING, this::setPublishDiff); + clusterSettings.addSettingsUpdateConsumer(COMMIT_TIMEOUT_SETTING, this::setCommitTimeout); + clusterSettings.addSettingsUpdateConsumer(PUBLISH_TIMEOUT_SETTING, this::setPublishTimeout); this.noMasterBlock = NO_MASTER_BLOCK_SETTING.get(settings); this.publishTimeout = PUBLISH_TIMEOUT_SETTING.get(settings); this.commitTimeout = COMMIT_TIMEOUT_SETTING.get(settings); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 7333618aef6..2c230d22599 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ 
b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -56,7 +57,6 @@ import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; @@ -140,7 +140,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen @Inject public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, - TransportService transportService, final ClusterService clusterService, ClusterSettingsService clusterSettingsService, + TransportService transportService, final ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, ElectMasterService electMasterService, DiscoverySettings discoverySettings) { super(settings); @@ -172,7 +172,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes); - clusterSettingsService.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { + clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { final ClusterState clusterState = clusterService.state(); int masterNodes = clusterState.nodes().masterNodes().size(); if (value > masterNodes) { @@ -180,7 +180,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } return true; }); - clusterSettingsService.addSettingsUpdateConsumer(REJOIN_ON_MASTER_GONE_SETTING, this::setRejoingOnMasterGone); + clusterSettings.addSettingsUpdateConsumer(REJOIN_ON_MASTER_GONE_SETTING, this::setRejoingOnMasterGone); this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, clusterName, clusterService); this.masterFD.addListener(new MasterNodeFailureListener()); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index ad98e3e9a8f..d8c142f478d 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -58,7 +59,6 
@@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.plugins.PluginsService; import java.io.IOException; @@ -100,7 +100,7 @@ public class IndicesService extends AbstractLifecycleComponent i @Inject public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, - ClusterSettingsService clusterSettingsService, AnalysisRegistry analysisRegistry, + ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, MapperRegistry mapperRegistry) { super(settings); @@ -113,8 +113,8 @@ public class IndicesService extends AbstractLifecycleComponent i this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.mapperRegistry = mapperRegistry; - clusterSettingsService.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType); - clusterSettingsService.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle); + clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType); + clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle); } diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 8e4886f4ac9..fc80495bb1b 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -25,10 +25,10 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.util.ArrayList; import java.util.List; @@ -66,7 +66,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final AtomicLong parentTripCount = new AtomicLong(0); @Inject - public HierarchyCircuitBreakerService(Settings settings, ClusterSettingsService clusterSettingsService) { + public HierarchyCircuitBreakerService(Settings settings, ClusterSettings clusterSettings) { super(settings); this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), @@ -88,9 +88,9 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { registerBreaker(this.requestSettings); registerBreaker(this.fielddataSettings); - clusterSettingsService.addSettingsUpdateConsumer(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, this::setTotalCircuitBreakerLimit, 
this::validateTotalCircuitBreakerLimit); - clusterSettingsService.addSettingsUpdateConsumer(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setFieldDataBreakerLimit); - clusterSettingsService.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setRequestBreakerLimit); + clusterSettings.addSettingsUpdateConsumer(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, this::setTotalCircuitBreakerLimit, this::validateTotalCircuitBreakerLimit); + clusterSettings.addSettingsUpdateConsumer(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setFieldDataBreakerLimit); + clusterSettings.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setRequestBreakerLimit); } private void setRequestBreakerLimit(ByteSizeValue newRequestMax, Double newRequestOverhead) { long newRequestLimitBytes = newRequestMax == null ? HierarchyCircuitBreakerService.this.requestSettings.getLimit() : newRequestMax.bytes(); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index eb1c643038f..0e72dedb3ad 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -23,13 +23,13 @@ import org.apache.lucene.store.RateLimiter; import org.apache.lucene.store.RateLimiter.SimpleRateLimiter; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; @@ -95,7 +95,7 @@ public class RecoverySettings extends AbstractComponent implements Closeable { @Inject - public RecoverySettings(Settings settings, ClusterSettingsService clusterSettingsService) { + public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { super(settings); this.fileChunkSize = INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING.get(settings); this.translogOps = INDICES_RECOVERY_TRANSLOG_OPS_SETTING.get(settings); @@ -130,18 +130,18 @@ public class RecoverySettings extends AbstractComponent implements Closeable { logger.debug("using max_bytes_per_sec[{}], concurrent_streams [{}], file_chunk_size [{}], translog_size [{}], translog_ops [{}], and compress [{}]", maxBytesPerSec, concurrentStreams, fileChunkSize, translogSize, translogOps, compress); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, this::setFileChunkSize); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_OPS_SETTING, this::setTranslogOps); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, this::setTranslogSize); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_COMPRESS_SETTING, this::setCompress); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, 
this::setConcurrentStreams); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, this::setConcurrentSmallFileStreams); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, this::setRetryDelayStateSync); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, this::setRetryDelayNetwork); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, this::setInternalActionTimeout); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, this::setInternalActionLongTimeout); - clusterSettingsService.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setActivityTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING, this::setFileChunkSize); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_OPS_SETTING, this::setTranslogOps); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_TRANSLOG_SIZE_SETTING, this::setTranslogSize); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_COMPRESS_SETTING, this::setCompress); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, this::setConcurrentStreams); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, this::setConcurrentSmallFileStreams); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, this::setRetryDelayStateSync); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, this::setRetryDelayNetwork); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, this::setInternalActionTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, this::setInternalActionLongTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setActivityTimeout); } @Override diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 47d18105d0f..454dc460274 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -50,7 +51,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.common.settings.ClusterSettingsService; import java.io.IOException; import java.util.ArrayList; @@ -78,7 +78,7 @@ public class 
IndicesTTLService extends AbstractLifecycleComponent imp private final ParseFieldMatcher parseFieldMatcher; @Inject - public SearchService(Settings settings, ClusterSettingsService clusterSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool threadPool, + public SearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { super(settings); @@ -187,7 +187,7 @@ public class SearchService extends AbstractLifecycleComponent imp this.indicesWarmer.addListener(new SearchWarmer()); defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings); - clusterSettingsService.addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); + clusterSettings.addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); } private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) { diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index c4049573b9b..14b2680d25c 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -49,7 +49,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; @@ -125,12 +124,12 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); private final BlockingQueue updatedSnapshotStateQueue = ConcurrentCollections.newBlockingQueue(); - private final ClusterSettingsService clusterSettingsService; + private final ClusterSettings clusterSettings; @Inject public RestoreService(Settings settings, ClusterService clusterService, RepositoriesService repositoriesService, TransportService transportService, AllocationService allocationService, MetaDataCreateIndexService createIndexService, ClusterSettings dynamicSettings, - MetaDataIndexUpgradeService metaDataIndexUpgradeService, ClusterSettingsService clusterSettingsService) { + MetaDataIndexUpgradeService metaDataIndexUpgradeService, ClusterSettings clusterSettings) { super(settings); this.clusterService = clusterService; this.repositoriesService = repositoriesService; @@ -141,7 +140,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis this.metaDataIndexUpgradeService = metaDataIndexUpgradeService; transportService.registerRequestHandler(UPDATE_RESTORE_ACTION_NAME, UpdateIndexShardRestoreStatusRequest::new, ThreadPool.Names.SAME, new UpdateRestoreStateRequestHandler()); clusterService.add(this); - this.clusterSettingsService = clusterSettingsService; + this.clusterSettings = clusterSettings; } /** @@ -392,7 +391,7 @@ public class RestoreService extends AbstractComponent implements 
ClusterStateListener
         if (request.includeGlobalState()) {
             if (metaData.persistentSettings() != null) {
                 Settings settings = metaData.persistentSettings();
-                clusterSettingsService.dryRun(settings);
+                clusterSettings.dryRun(settings);
                 mdBuilder.persistentSettings(settings);
             }
             if (metaData.templates() != null) {
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index f0fc5d86b00..862b048e2ec 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsException;
@@ -37,7 +38,6 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.common.settings.ClusterSettingsService;
 import java.io.IOException;
 import java.util.*;
@@ -250,9 +250,9 @@ public class ThreadPool extends AbstractComponent {
         this.estimatedTimeThread.start();
     }
 
-    public void setNodeSettingsService(ClusterSettingsService clusterSettingsService) {
+    public void setNodeSettingsService(ClusterSettings clusterSettings) {
         if(settingsListenerIsSet.compareAndSet(false, true)) {
-            clusterSettingsService.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> {validate(s.getAsGroups()); return true;});
+            clusterSettings.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> {validate(s.getAsGroups()); return true;});
         } else {
             throw new IllegalStateException("the node settings listener was set more than once");
         }
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java
index d0d5b5e230b..82802e99dfa 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -29,6 +29,7 @@ import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
@@ -38,7 +39,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
-import org.elasticsearch.common.settings.ClusterSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import java.io.IOException;
@@ -132,9 +132,9 @@ public class TransportService extends AbstractLifecycleComponent index = new AtomicReference<>(); AtomicReference shard = new
AtomicReference<>(); ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); - ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); SettingsUpdater updater = new SettingsUpdater(settingsService); @@ -75,7 +73,7 @@ public class SettingsUpdaterTests extends ESTestCase { public void testAllOrNothing() { ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); - ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AtomicReference index = new AtomicReference<>(); AtomicReference shard = new AtomicReference<>(); settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); @@ -101,7 +99,7 @@ public class SettingsUpdaterTests extends ESTestCase { public void testClusterBlock() { ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); - ClusterSettingsService settingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AtomicReference index = new AtomicReference<>(); AtomicReference shard = new AtomicReference<>(); settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 69fb6cb1a61..e9627b10068 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -31,11 +31,10 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.ClusterSettingsService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.settings.IndexDynamicSettings; @@ -86,7 +85,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSetting() { ClusterModule module = new ClusterModule(Settings.EMPTY); module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster)); - assertInstanceBinding(module, ClusterSettingsService.class, service -> 
service.getClusterSettings().hasDynamicSetting("foo.bar")); + assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } public void testRegisterIndexDynamicSettingDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index bc4ef8235d4..e622036e13b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.hamcrest.Matchers; @@ -283,7 +282,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2); settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3); settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0); - ClusterSettingsService service = new ClusterSettingsService(settingsBuilder().build(), new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings service = new ClusterSettings(settingsBuilder().build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings.build(), service); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f)); @@ -312,7 +311,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testNoRebalanceOnPrimaryOverload() { Settings.Builder settings = settingsBuilder(); AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(), - new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), new ShardsAllocators(settings.build(), + new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings.build(), NoopGatewayAllocator.INSTANCE, new ShardsAllocator() { @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 279687a004f..52e88ea3bc9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -46,7 +45,7 @@ import static 
org.hamcrest.CoreMatchers.equalTo; */ public class DiskThresholdDeciderUnitTests extends ESTestCase { public void testDynamicSettings() { - ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -84,7 +83,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanAllocateUsesMaxAvailableSpace() { - ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -125,7 +124,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanRemainUsesLeastAvailableSpace() { - ClusterSettingsService nss = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); ImmutableOpenMap.Builder shardRoutingMap = ImmutableOpenMap.builder(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index b0a49a93936..1bdc39036a3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import java.util.EnumSet; @@ -162,8 +161,8 @@ public class EnableAllocationTests extends ESAllocationTestCase { .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) .build(); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(build, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - AllocationService strategy = createAllocationService(build, clusterSettingsService, getRandom()); + ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); Settings indexSettings = useClusterSetting ? 
Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -224,7 +223,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); } - clusterSettingsService.applySettings(clusterState.metaData().settings()); + clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 6 shards to be started 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(6)); @@ -264,8 +263,8 @@ public class EnableAllocationTests extends ESAllocationTestCase { .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) .build(); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(build, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - AllocationService strategy = createAllocationService(build, clusterSettingsService, getRandom()); + ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -315,7 +314,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices() .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? 
Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); } - clusterSettingsService.applySettings(clusterState.metaData().settings()); + clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 4 primaries to be started and 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4)); diff --git a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index c484df7d99d..bb9d23db1cb 100644 --- a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.concurrent.atomic.AtomicBoolean; @@ -88,7 +87,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicReference lastException = new AtomicReference<>(null); final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { @Override public CircuitBreaker getBreaker(String name) { @@ -148,7 +147,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicInteger parentTripped = new AtomicInteger(0); final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { @Override public CircuitBreaker getBreaker(String name) { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java deleted file mode 100644 index 28a639e8903..00000000000 --- a/core/src/test/java/org/elasticsearch/common/settings/ClusterSettingsTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.settings; - -import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; -import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; - -/** - */ -public class ClusterSettingsTests extends ESTestCase { - - public void testGet() { - ClusterSettings settings = new ClusterSettings(); - Setting setting = settings.get("cluster.routing.allocation.require.value"); - assertEquals(setting, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING); - - setting = settings.get("cluster.routing.allocation.total_shards_per_node"); - assertEquals(setting, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING); - } - - public void testIsDynamic(){ - ClusterSettings settings = new ClusterSettings(new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster)))); - assertFalse(settings.hasDynamicSetting("foo.bar.baz")); - assertTrue(settings.hasDynamicSetting("foo.bar")); - assertNotNull(settings.get("foo.bar.baz")); - } - - public void testDiff() throws IOException { - Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster); - Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); - ClusterSettings settings = new ClusterSettings(new HashSet<>(Arrays.asList(foobar, foobarbaz))); - Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); - assertEquals(diff.getAsMap().size(), 1); - assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(1)); - - diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).build()); - assertEquals(diff.getAsMap().size(), 1); - assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(17)); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java similarity index 65% rename from core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java rename to core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index ae6fd79aa58..c837bbb7d98 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -18,23 +18,22 @@ */ package org.elasticsearch.common.settings; +import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.concurrent.atomic.AtomicInteger; -public class SettingsServiceTests extends ESTestCase { +public class 
ScopedSettingsTests extends ESTestCase { public void testAddConsumer() { Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); - SettingsService service = new SettingsService(Settings.EMPTY) { - @Override - protected Setting getSetting(String key) { - if (key.equals(testSetting.getKey())) { - return testSetting; - } - return null; - } - }; + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); AtomicInteger consumer = new AtomicInteger(); service.addSettingsUpdateConsumer(testSetting, consumer::set); @@ -46,7 +45,6 @@ public class SettingsServiceTests extends ESTestCase { assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); } - try { service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {consumer.set(a); consumer2.set(b);}); fail("setting not registered"); @@ -63,17 +61,7 @@ public class SettingsServiceTests extends ESTestCase { public void testApply() { Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); - SettingsService service = new SettingsService(Settings.EMPTY) { - @Override - protected Setting getSetting(String key) { - if (key.equals(testSetting.getKey())) { - return testSetting; - } else if (key.equals(testSetting2.getKey())) { - return testSetting2; - } - return null; - } - }; + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); AtomicInteger consumer = new AtomicInteger(); service.addSettingsUpdateConsumer(testSetting, consumer::set); @@ -121,4 +109,33 @@ public class SettingsServiceTests extends ESTestCase { assertEquals(2, aC.get()); assertEquals(15, bC.get()); } + + public void testGet() { + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + Setting setting = settings.get("cluster.routing.allocation.require.value"); + assertEquals(setting, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING); + + setting = settings.get("cluster.routing.allocation.total_shards_per_node"); + assertEquals(setting, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING); + } + + public void testIsDynamic(){ + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster)))); + assertFalse(settings.hasDynamicSetting("foo.bar.baz")); + assertTrue(settings.hasDynamicSetting("foo.bar")); + assertNotNull(settings.get("foo.bar.baz")); + } + + public void testDiff() throws IOException { + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster); + Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); + Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); + assertEquals(diff.getAsMap().size(), 1); + assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(1)); + + diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).build()); + assertEquals(diff.getAsMap().size(), 1); + 
assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(17)); + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 5787ae59f56..a61d91c8f25 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -42,7 +42,7 @@ public class SettingTests extends ESTestCase { ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); AtomicReference value = new AtomicReference<>(null); - ClusterSettingsService.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger, Settings.EMPTY); try { settingUpdater.prepareApply(Settings.builder().put("a.byte.size", 12).build()); fail("no unit"); @@ -58,7 +58,7 @@ public class SettingTests extends ESTestCase { public void testSimpleUpdate() { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); AtomicReference atomicBoolean = new AtomicReference<>(null); - ClusterSettingsService.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); Settings build = Settings.builder().put("foo.bar", false).build(); settingUpdater.prepareApply(build); assertNull(atomicBoolean.get()); @@ -96,7 +96,7 @@ public class SettingTests extends ESTestCase { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); - ClusterSettingsService.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger, Settings.EMPTY); settingUpdater.prepareApply(Settings.builder().put("foo.bar", true).build()); assertNull(ab1.get()); assertNull(ab2.get()); @@ -124,7 +124,7 @@ public class SettingTests extends ESTestCase { assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); - ClusterSettingsService.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); settingUpdater.apply(); assertSame("no update - type has not changed", type, ref.get()); @@ -147,7 +147,7 @@ public class SettingTests extends ESTestCase { Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Cluster); assertFalse(integerSetting.isGroupSetting()); AtomicReference ref = new AtomicReference<>(null); - ClusterSettingsService.SettingUpdater settingUpdater = integerSetting.newUpdater(ref::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = integerSetting.newUpdater(ref::set, logger, Settings.EMPTY); assertNull(ref.get()); assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.int.bar", "2").build())); settingUpdater.rollback(); @@ -169,7 +169,7 @@ public class SettingTests extends ESTestCase { AtomicReference ref = new AtomicReference<>(null); Setting setting 
= Setting.groupSetting("foo.bar.", true, Setting.Scope.Cluster); assertTrue(setting.isGroupSetting()); - ClusterSettingsService.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build())); settingUpdater.apply(); @@ -209,7 +209,7 @@ public class SettingTests extends ESTestCase { assertTrue(setting.match("foo.bar.baz")); assertFalse(setting.match("foo.baz.bar")); - ClusterSettingsService.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY, (s) -> false); + ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY, (s) -> false); try { predicateSettingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build()); fail("not accepted"); @@ -243,7 +243,7 @@ public class SettingTests extends ESTestCase { Composite c = new Composite(); Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.Cluster); Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.Cluster); - ClusterSettingsService.SettingUpdater settingUpdater = Setting.compoundUpdater(c::set, a, b, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = Setting.compoundUpdater(c::set, a, b, logger, Settings.EMPTY); assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); settingUpdater.apply(); assertNull(c.a); diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index f3783fa6d12..184de7f385e 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; @@ -339,7 +338,7 @@ public class BigArraysTests extends ESSingleNodeTestCase { Settings.builder() .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), size - 1, ByteSizeUnit.BYTES) .build(), - new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); try { @@ -359,7 +358,7 @@ public class BigArraysTests extends ESSingleNodeTestCase { Settings.builder() .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) .build(), - new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); final int size = 
scaledRandomIntBetween(1, 20); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 2dac067fed3..1f6f0bdc0e3 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.BaseFuture; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -68,7 +67,7 @@ public class NodeJoinControllerTests extends ESTestCase { // make sure we have a master clusterService.setState(ClusterState.builder(clusterService.state()).nodes(DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.id()))); nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))), Settings.EMPTY); + new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), Settings.EMPTY); } public void testSimpleJoinAccumulation() throws InterruptedException, ExecutionException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 3cb5316cacc..e5b3337683b 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -238,7 +237,7 @@ public class PublishClusterStateActionTests extends ESTestCase { protected MockPublishAction buildPublishClusterStateAction(Settings settings, MockTransportService transportService, DiscoveryNodesProvider nodesProvider, PublishClusterStateAction.NewPendingClusterStateListener listener) { - DiscoverySettings discoverySettings = new DiscoverySettings(settings, new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + DiscoverySettings discoverySettings = new DiscoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); return new MockPublishAction(settings, transportService, nodesProvider, listener, discoverySettings, ClusterName.DEFAULT); } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java index 38da1ac135c..212d7ecbb7b 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -67,7 +66,7 @@ public class CircuitBreakerUnitTests extends ESTestCase { } public void testRegisterCustomBreaker() throws Exception { - CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); String customName = "custom"; BreakerSettings settings = new BreakerSettings(customName, 20, 1.0); service.registerBreaker(settings); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index ca70fc1fe2f..75a2b14060b 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; @@ -61,7 +60,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class RecoverySourceHandlerTests extends ESTestCase { private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1); - private final ClusterSettingsService service = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + private final ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); public void testSendFiles() throws Throwable { Settings settings = Settings.builder().put("indices.recovery.concurrent_streams", 1). 
diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 47d82a36088..ddaa53fce7b 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.threadpool; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool.Names; @@ -92,10 +91,10 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); - clusterSettingsService.applySettings( + clusterSettings.applySettings( settingsBuilder() .put("threadpool." + threadPoolName + ".type", invalidThreadPoolType.getType()) .build() @@ -117,13 +116,13 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Settings nodeSettings = Settings.settingsBuilder() .put("name", "testCachedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - Settings settings = clusterSettingsService.applySettings(settingsBuilder() + Settings settings = clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); @@ -139,7 +138,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change keep alive Executor oldExecutor = threadPool.executor(threadPoolName); - settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." 
+ threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value changed assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -148,7 +147,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Set the same keep alive - settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value didn't change assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -168,10 +167,10 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Settings nodeSettings = Settings.settingsBuilder() .put("name", "testFixedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - Settings settings = clusterSettingsService.applySettings(settingsBuilder() + Settings settings = clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".size", "15") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); @@ -184,7 +183,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(0L)); // Put old type back - settings = clusterSettingsService.applySettings(Settings.EMPTY); + settings = clusterSettings.applySettings(Settings.EMPTY); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); // Make sure keep alive value is not used assertThat(info(threadPool, threadPoolName).getKeepAlive(), nullValue()); @@ -197,7 +196,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change size Executor oldExecutor = threadPool.executor(threadPoolName); - settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".size", "10").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".size", "10").build()); // Make sure size values changed assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(10)); @@ -208,7 +207,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Change queue capacity - settings = clusterSettingsService.applySettings(settingsBuilder().put(settings).put("threadpool." 
+ threadPoolName + ".queue", "500") + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".queue", "500") .build()); } finally { terminateThreadPoolIfNeeded(threadPool); @@ -223,8 +222,8 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { .put("threadpool." + threadPoolName + ".size", 10) .put("name", "testScalingExecutorType").build(); threadPool = new ThreadPool(nodeSettings); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1)); assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L)); @@ -233,7 +232,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change settings that doesn't require pool replacement Executor oldExecutor = threadPool.executor(threadPoolName); - clusterSettingsService.applySettings(settingsBuilder() + clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .put("threadpool." + threadPoolName + ".min", "2") .put("threadpool." + threadPoolName + ".size", "15") @@ -261,8 +260,8 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { .put("threadpool." + threadPoolName + ".queue_size", 1000) .put("name", "testCachedExecutorType").build(); threadPool = new ThreadPool(nodeSettings); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L); final CountDownLatch latch = new CountDownLatch(1); @@ -276,7 +275,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { } } ); - clusterSettingsService.applySettings(settingsBuilder().put("threadpool." + threadPoolName + ".queue_size", 2000).build()); + clusterSettings.applySettings(settingsBuilder().put("threadpool." 
+ threadPoolName + ".queue_size", 2000).build()); assertThat(threadPool.executor(threadPoolName), not(sameInstance(oldExecutor))); assertThat(oldExecutor.isShutdown(), equalTo(true)); assertThat(oldExecutor.isTerminating(), equalTo(true)); @@ -298,8 +297,8 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { .put("threadpool.my_pool2.queue_size", "1") .put("name", "testCustomThreadPool").build(); threadPool = new ThreadPool(nodeSettings); - ClusterSettingsService clusterSettingsService = new ClusterSettingsService(nodeSettings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - threadPool.setNodeSettingsService(clusterSettingsService); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setNodeSettingsService(clusterSettings); ThreadPoolInfo groups = threadPool.info(); boolean foundPool1 = false; boolean foundPool2 = false; @@ -331,7 +330,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Settings settings = Settings.builder() .put("threadpool.my_pool2.size", "10") .build(); - clusterSettingsService.applySettings(settings); + clusterSettings.applySettings(settings); groups = threadPool.info(); foundPool1 = false; diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index f7a9c221f7c..6599412834d 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -652,7 +651,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { includeSettings = "test"; excludeSettings = "DOESN'T_MATCH"; } - ClusterSettingsService service = new ClusterSettingsService(Settings.EMPTY, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); serviceA.setDynamicSettings(service); service.applySettings(Settings.builder() .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), includeSettings, TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), excludeSettings) diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java index ca3e4a21604..02fffda722a 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty.NettyTransport; @@ -65,7 +64,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { threadPool = new ThreadPool(settings); - threadPool.setNodeSettingsService(new ClusterSettingsService(settings, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); + threadPool.setNodeSettingsService(new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); NetworkService networkService = new NetworkService(settings); BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(settings, threadPool), new NoneCircuitBreakerService()); nettyTransport = new NettyTransport(settings, threadPool, networkService, bigArrays, Version.CURRENT, new NamedWriteableRegistry()); diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java index 59b57fca7de..15e49d7a60d 100755 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java @@ -27,7 +27,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java index e0d15a584bd..3fd3f1948e8 100755 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java @@ -27,7 +27,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java index e004230b735..c1a00979fcd 100755 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java @@ -27,7 +27,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 
b105d269163..3e9b0c09cb2 100644 --- a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -29,11 +29,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; @@ -78,11 +78,11 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { } @Inject - public MockInternalClusterInfoService(Settings settings, ClusterSettingsService clusterSettingsService, + public MockInternalClusterInfoService(Settings settings, ClusterSettings clusterSettings, TransportNodesStatsAction transportNodesStatsAction, TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { - super(settings, clusterSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); + super(settings, clusterSettings, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); this.clusterName = ClusterName.clusterNameFromSettings(settings); stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100)); diff --git a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 09a0ab9b6d3..98b5181636d 100644 --- a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -22,12 +22,12 @@ package org.elasticsearch.search; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.dfs.DfsPhase; @@ -67,13 +67,13 @@ public class MockSearchService extends SearchService { } @Inject - public MockSearchService(Settings settings, ClusterSettingsService clusterSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, + public MockSearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase 
fetchPhase, IndicesRequestCache indicesQueryCache) { - super(settings, clusterSettingsService, clusterService, indicesService, indicesWarmer, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, + super(settings, clusterSettings, clusterService, indicesService, indicesWarmer, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, queryPhase, fetchPhase, indicesQueryCache); } - + @Override protected void putContext(SearchContext context) { super.putContext(context); diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index e4d3a3c42a5..091ff23cb5a 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -44,7 +44,6 @@ import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.ReplicaShardAllocator; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; -import org.elasticsearch.common.settings.ClusterSettingsService; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import java.lang.reflect.Constructor; @@ -68,37 +67,37 @@ public abstract class ESAllocationTestCase extends ESTestCase { } public static MockAllocationService createAllocationService(Settings settings, Random random) { - return createAllocationService(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), random); + return createAllocationService(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random); } - public static MockAllocationService createAllocationService(Settings settings, ClusterSettingsService clusterSettingsService, Random random) { + public static MockAllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings, Random random) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, clusterSettingsService, random), + randomAllocationDeciders(settings, clusterSettings, random), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); } public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), + randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), clusterInfoService); } public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator allocator) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new ClusterSettingsService(Settings.Builder.EMPTY_SETTINGS, new ClusterSettings(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), getRandom()), + randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings, allocator), EmptyClusterInfoService.INSTANCE); } - public static AllocationDeciders randomAllocationDeciders(Settings 
settings, ClusterSettingsService clusterSettingsService, Random random) { + public static AllocationDeciders randomAllocationDeciders(Settings settings, ClusterSettings clusterSettings, Random random) { final List> defaultAllocationDeciders = ClusterModule.DEFAULT_ALLOCATION_DECIDERS; final List list = new ArrayList<>(); for (Class deciderClass : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { try { try { - Constructor constructor = deciderClass.getConstructor(Settings.class, ClusterSettingsService.class); - list.add(constructor.newInstance(settings, clusterSettingsService)); + Constructor constructor = deciderClass.getConstructor(Settings.class, ClusterSettings.class); + list.add(constructor.newInstance(settings, clusterSettings)); } catch (NoSuchMethodException e) { Constructor constructor = null; constructor = deciderClass.getConstructor(Settings.class); From d4e7bd2cc3ef7f5cc3adc7c9e395551720e3de26 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 10:14:33 +0100 Subject: [PATCH 011/322] predicate -> consumer --- .../settings/AbstractScopedSettings.java | 8 ++++--- .../common/settings/Setting.java | 22 +++++++++++-------- .../discovery/zen/ZenDiscovery.java | 1 - .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../common/settings/ScopedSettingsTests.java | 2 +- .../common/settings/SettingTests.java | 8 +------ 6 files changed, 21 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 66c966dde88..e5dedbba7f1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -155,7 +155,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. *

*/ - public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Predicate predicate) { + public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Consumer predicate) { if (setting != get(setting.getKey())) { throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); } @@ -167,6 +167,8 @@ public abstract class AbstractScopedSettings extends AbstractComponent { *

* Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. *

+ * This method registers a compound updater that is useful if two settings are depending on each other. The consumer is always provided + * with both values even if only one of the two changes. */ public synchronized void addSettingsUpdateConsumer(Setting
a, Setting b, BiConsumer consumer) { if (a != get(a.getKey())) { @@ -185,7 +187,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { *

*/ public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer) { - addSettingsUpdateConsumer(setting, consumer, (s) -> true); + addSettingsUpdateConsumer(setting, consumer, (s) -> {}); } /** @@ -238,7 +240,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } /** - * Returns a settings object that contains all clustersettings that are not + * Returns a settings object that contains all settings that are not * already set in the given source. The diff contains either the default value for each * setting or the settings value in the given default settings. */ diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index fda10ea226b..8b3227d7b9f 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -150,10 +150,10 @@ public class Setting extends ToXContentToBytes { } AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { - return newUpdater(consumer, logger, settings, (s) -> true); + return newUpdater(consumer, logger, settings, (s) -> {}); } - AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { if (isDynamic()) { return new Updater(consumer, logger, settings, accept); } else { @@ -207,13 +207,13 @@ public class Setting extends ToXContentToBytes { private class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final ESLogger logger; - private final Predicate accept; + private final Consumer accept; private String value; private boolean commitPending; private String pendingValue; private T valueInstance; - public Updater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + public Updater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { this.consumer = consumer; this.logger = logger; value = getRaw(settings); @@ -228,8 +228,10 @@ public class Setting extends ToXContentToBytes { } if (value.equals(newValue) == false) { T inst = get(settings); - if (accept.test(inst) == false) { - throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + getRaw(settings) + "]"); + try { + accept.accept(inst); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + getRaw(settings) + "]", e); } pendingValue = newValue; valueInstance = inst; @@ -325,7 +327,7 @@ public class Setting extends ToXContentToBytes { } @Override - public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Predicate accept) { + public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { if (isDynamic() == false) { throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); } @@ -338,8 +340,10 @@ public class Setting extends ToXContentToBytes { public boolean prepareApply(Settings settings) { Settings currentSettings = get(settings); if (currentSettings.equals(committedSettings) == false) { - if (accept.test(currentSettings) == false) { - throw new IllegalArgumentException("illegal 
value can't update [" + key + "] from [" + committedSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]"); + try { + accept.accept(currentSettings); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + committedSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e); } pendingSettings = currentSettings; return true; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 2c230d22599..441f6403d1d 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -178,7 +178,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen if (value > masterNodes) { throw new IllegalArgumentException("cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current master nodes count [" + masterNodes + "]"); } - return true; }); clusterSettings.addSettingsUpdateConsumer(REJOIN_ON_MASTER_GONE_SETTING, this::setRejoingOnMasterGone); diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 862b048e2ec..870ae387907 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -252,7 +252,7 @@ public class ThreadPool extends AbstractComponent { public void setNodeSettingsService(ClusterSettings clusterSettings) { if(settingsListenerIsSet.compareAndSet(false, true)) { - clusterSettings.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> {validate(s.getAsGroups()); return true;}); + clusterSettings.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> validate(s.getAsGroups())); } else { throw new IllegalStateException("the node settings listener was set more then once"); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index c837bbb7d98..bfc91bc92a3 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -66,7 +66,7 @@ public class ScopedSettingsTests extends ESTestCase { AtomicInteger consumer = new AtomicInteger(); service.addSettingsUpdateConsumer(testSetting, consumer::set); AtomicInteger consumer2 = new AtomicInteger(); - service.addSettingsUpdateConsumer(testSetting2, consumer2::set, (s) -> s > 0); + service.addSettingsUpdateConsumer(testSetting2, consumer2::set, (s) -> assertTrue(s > 0)); AtomicInteger aC = new AtomicInteger(); AtomicInteger bC = new AtomicInteger(); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index a61d91c8f25..5f6f2b3615c 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -209,7 +209,7 @@ public class SettingTests extends ESTestCase { assertTrue(setting.match("foo.bar.baz")); assertFalse(setting.match("foo.baz.bar")); - ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, 
Settings.EMPTY, (s) -> false); + ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY, (s) -> assertFalse(true)); try { predicateSettingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build()); fail("not accepted"); @@ -273,10 +273,4 @@ public class SettingTests extends ESTestCase { assertEquals(1, c.b.intValue()); } - - - - - - } From 3d946871e8aa6c563db0195dcced8ebec5420660 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 10:30:18 +0100 Subject: [PATCH 012/322] move setting registration into settings module - this stuff needs to be present for transport clients as well. --- .../elasticsearch/cluster/ClusterModule.java | 24 ----------------- .../settings/AbstractScopedSettings.java | 6 ++--- .../common/settings/SettingsModule.java | 27 ++++++++++++++++++- .../cluster/ClusterModuleTests.java | 10 +++---- 4 files changed, 33 insertions(+), 34 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 7505926868a..ca4de192e14 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -108,7 +108,6 @@ public class ClusterModule extends AbstractModule { SnapshotInProgressAllocationDecider.class)); private final Settings settings; - private final Map> clusterDynamicSettings = new HashMap<>(); private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder(); private final ExtensionPoint.SelectedType shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class); private final ExtensionPoint.ClassSet allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class); @@ -120,7 +119,6 @@ public class ClusterModule extends AbstractModule { public ClusterModule(Settings settings) { this.settings = settings; - registerBuiltinClusterSettings(); registerBuiltinIndexSettings(); for (Class decider : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { @@ -130,12 +128,6 @@ public class ClusterModule extends AbstractModule { registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class); } - private void registerBuiltinClusterSettings() { - for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { - registerSetting(setting); - } - } - private void registerBuiltinIndexSettings() { registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY); @@ -204,18 +196,6 @@ public class ClusterModule extends AbstractModule { indexDynamicSettings.addSetting(setting, validator); } - public void registerSetting(Setting setting) { - switch (setting.getScope()) { - case Cluster: - if (clusterDynamicSettings.containsKey(setting.getKey())) { - throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); - } - clusterDynamicSettings.put(setting.getKey(), setting); - break; - case Index: - throw new UnsupportedOperationException("not yet implemented"); - } - } public void registerAllocationDecider(Class allocationDecider) { allocationDeciders.registerExtension(allocationDecider); @@ -261,9 +241,5 @@ public class ClusterModule extends AbstractModule { bind(NodeIndexDeletedAction.class).asEagerSingleton(); 
bind(NodeMappingRefreshAction.class).asEagerSingleton(); bind(MappingUpdatedAction.class).asEagerSingleton(); - final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); - bind(ClusterSettings.class).toInstance(clusterSettings); - - } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index e5dedbba7f1..af82f97bf0c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -21,13 +21,11 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLoggerFactory; import java.util.*; import java.util.function.BiConsumer; import java.util.function.Consumer; -import java.util.function.Predicate; /** * A basic setting service that can be used for per-index and per-cluster settings. @@ -152,7 +150,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { /** * Adds a settings consumer with a predicate that is only evaluated at update time. *

- * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + * Note: Only settings registered in {@link SettingsModule} can be changed dynamically. *

*/ public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Consumer predicate) { @@ -165,7 +163,7 @@ } /** * Adds a settings consumer that accepts the values for two settings. The consumer is only notified if one or both settings change. *

- * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + * Note: Only settings registered in {@link SettingsModule} can be changed dynamically. *

* This method registers a compound updater that is useful if two settings are depending on each other. The consumer is always provided * with both values even if only one of the two changes. diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 2ae4799d9f3..647d9e02963 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -21,6 +21,10 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.inject.AbstractModule; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; + /** * A module that binds the provided settings to the {@link Settings} interface. * @@ -30,15 +34,36 @@ public class SettingsModule extends AbstractModule { private final Settings settings; private final SettingsFilter settingsFilter; + private final Map> clusterDynamicSettings = new HashMap<>(); + public SettingsModule(Settings settings, SettingsFilter settingsFilter) { this.settings = settings; this.settingsFilter = settingsFilter; + for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { + registerSetting(setting); + } } @Override protected void configure() { bind(Settings.class).toInstance(settings); bind(SettingsFilter.class).toInstance(settingsFilter); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); + bind(ClusterSettings.class).toInstance(clusterSettings); } -} \ No newline at end of file + + public void registerSetting(Setting setting) { + switch (setting.getScope()) { + case Cluster: + if (clusterDynamicSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + clusterDynamicSettings.put(setting.getKey(), setting); + break; + case Index: + throw new UnsupportedOperationException("not yet implemented"); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index e9627b10068..2b77c3c7514 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -34,9 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.*; import org.elasticsearch.index.settings.IndexDynamicSettings; public class ClusterModuleTests extends ModuleTestCase { @@ -74,7 +72,8 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testRegisterClusterDynamicSettingDuplicate() { - ClusterModule module = new ClusterModule(Settings.EMPTY); + final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); try { module.registerSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING); } catch (IllegalArgumentException e) { @@ -83,7 +82,8 @@ public class ClusterModuleTests extends ModuleTestCase { } 
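A usage sketch of the registration flow exercised by these tests (not part of the patch itself; the key "example.enabled" is a hypothetical placeholder): settings are registered on the SettingsModule before configure() builds and binds the resulting ClusterSettings instance.

    SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY);
    SettingsModule settingsModule = new SettingsModule(Settings.EMPTY, settingsFilter);
    // registerSetting must run before configure() creates the ClusterSettings binding
    settingsModule.registerSetting(Setting.boolSetting("example.enabled", false, true, Setting.Scope.Cluster));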
public void testRegisterClusterDynamicSetting() { - ClusterModule module = new ClusterModule(Settings.EMPTY); + final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster)); assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } From 83ef665cbac54c19668246ed734a7883717fe885 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 10:36:25 +0100 Subject: [PATCH 013/322] fix tests --- .../routing/allocation/allocator/BalancedShardsAllocator.java | 2 +- .../threadpool/UpdateThreadPoolSettingsTests.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index f52b3228c47..1388eddf76f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -94,7 +94,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public void setWeightFunction(float indexBalance, float shardBalanceFactor) { - weightFunction = new WeightFunction(indexBalance, weightFunction.shardBalance); + weightFunction = new WeightFunction(indexBalance, shardBalanceFactor); } diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index ddaa53fce7b..07f34071442 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -101,8 +101,9 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException e) { + assertEquals("illegal value can't update [threadpool.] from [{}] to [{" + threadPoolName + ".type=" + invalidThreadPoolType.getType() + "}]", e.getMessage()); assertThat( - e.getMessage(), + e.getCause().getMessage(), is("setting threadpool." 
+ threadPoolName + ".type to " + invalidThreadPoolType.getType() + " is not permitted; must be " + validThreadPoolType.getType())); } finally { terminateThreadPoolIfNeeded(threadPool); From fd79e40e65861e85c500bf7bfb956af31d1d40b6 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 11:14:35 +0100 Subject: [PATCH 014/322] fix expcetion msg comparison --- .../snapshots/DedicatedClusterSnapshotRestoreIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 2c38c4c74c1..9133828c070 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -160,7 +160,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet(); fail("can't restore minimum master nodes"); } catch (IllegalArgumentException ex) { - assertEquals("cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [1]", ex.getMessage()); + assertEquals("illegal value can't update [discovery.zen.minimum_master_nodes] from [1] to [2]", ex.getMessage()); + assertEquals("cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [1]", ex.getCause().getMessage()); } logger.info("--> ensure that zen discovery minimum master nodes wasn't restored"); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() From 85a1b54867a81c334b29485e686cd4a086357a00 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 11:41:14 +0100 Subject: [PATCH 015/322] fix compilation --- .../org/elasticsearch/discovery/azure/AzureDiscovery.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java index 15e49d7a60d..89d6d17298f 100755 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java @@ -22,6 +22,7 @@ package org.elasticsearch.discovery.azure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -39,9 +40,9 @@ public class AzureDiscovery extends ZenDiscovery { @Inject public AzureDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, + 
super(settings, clusterName, threadPool, transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } From f0add6239e8be968566df4ab55fe8a2ea7505bbd Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 11:57:27 +0100 Subject: [PATCH 016/322] make enum names all uppercase --- .../close/TransportCloseIndexAction.java | 2 +- .../action/support/DestructiveOperations.java | 2 +- .../cluster/InternalClusterInfoService.java | 4 +-- .../action/index/MappingUpdatedAction.java | 2 +- .../cluster/metadata/MetaData.java | 3 +- .../allocator/BalancedShardsAllocator.java | 6 ++-- .../decider/AwarenessAllocationDecider.java | 4 +-- .../ClusterRebalanceAllocationDecider.java | 2 +- .../ConcurrentRebalanceAllocationDecider.java | 2 +- .../decider/DiskThresholdDecider.java | 10 +++--- .../decider/EnableAllocationDecider.java | 4 +-- .../decider/FilterAllocationDecider.java | 6 ++-- .../decider/ShardsLimitAllocationDecider.java | 2 +- .../SnapshotInProgressAllocationDecider.java | 2 +- .../decider/ThrottlingAllocationDecider.java | 4 +-- .../service/InternalClusterService.java | 2 +- .../common/settings/ClusterSettings.java | 2 +- .../common/settings/Setting.java | 4 +-- .../common/settings/SettingsModule.java | 4 +-- .../discovery/DiscoverySettings.java | 8 ++--- .../discovery/zen/ZenDiscovery.java | 2 +- .../zen/elect/ElectMasterService.java | 2 +- .../index/store/IndexStoreConfig.java | 4 +-- .../HierarchyCircuitBreakerService.java | 10 +++--- .../indices/recovery/RecoverySettings.java | 24 +++++++------- .../indices/ttl/IndicesTTLService.java | 2 +- .../elasticsearch/search/SearchService.java | 2 +- .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../transport/TransportService.java | 4 +-- .../cluster/ClusterModuleTests.java | 2 +- .../common/settings/ScopedSettingsTests.java | 14 ++++---- .../common/settings/SettingTests.java | 32 +++++++++---------- 32 files changed, 87 insertions(+), 88 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 57c9fdbf0da..e454fcabc7a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -45,7 +45,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.CLUSTER); @Inject public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java index 68b775bd532..5f2fb33e043 100644 --- a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java +++ b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java @@ -33,7 +33,7 @@ public final class DestructiveOperations extends AbstractComponent { /** * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed. 
*/ - public static final Setting REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.Cluster); + public static final Setting REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.CLUSTER); private volatile boolean destructiveRequiresName; @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 377d6578ac1..925a5a12ed6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -64,8 +64,8 @@ import java.util.concurrent.TimeUnit; */ public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, LocalNodeMasterListener, ClusterStateListener { - public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, Setting.Scope.Cluster); - public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, Setting.Scope.Cluster); + public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, Setting.Scope.CLUSTER); + public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, Setting.Scope.CLUSTER); private volatile TimeValue updateFrequency; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 7df0e24210e..9e57fe3a48a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -41,7 +41,7 @@ import java.util.concurrent.TimeoutException; */ public class MappingUpdatedAction extends AbstractComponent { - public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); + public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); private IndicesAdminClient client; private volatile TimeValue dynamicMappingUpdateTimeout; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 5b8514c22bc..0bcebfbd543 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -40,7 +40,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -141,7 +140,7 @@ public class MetaData implements Iterable, Diffable, Fr } - public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.Cluster); + public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.CLUSTER); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 1388eddf76f..da73cdc1455 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -73,9 +73,9 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; */ public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { - public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, Setting.Scope.Cluster); - public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, Setting.Scope.Cluster); - public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, Setting.Scope.Cluster); + public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, Setting.Scope.CLUSTER); + public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, Setting.Scope.CLUSTER); + public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, Setting.Scope.CLUSTER); private volatile WeightFunction weightFunction; private volatile float threshold; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 148fdd82f35..a66c8ddaef7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -77,8 +77,8 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new 
Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.CLUSTER); private String[] awarenessAttributes; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 036695bee46..0e5e744d274 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -48,7 +48,7 @@ import java.util.Locale; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.CLUSTER); /** * An enum representation for the configured re-balance type. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 7843a31ff91..21023400e32 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -42,7 +42,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, true, Setting.Scope.CLUSTER); private volatile int clusterConcurrentRebalance; @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 5ffaa1f0e84..49cf5abadd4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -81,11 +81,11 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile boolean enabled; private volatile TimeValue rerouteInterval; - public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = 
Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.Cluster);; - public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.CLUSTER);; + public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); /** * Listens for a node to go over the high watermark and kicks off an empty diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 61156de7137..1df362399f7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -59,10 +59,10 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.CLUSTER); public static final String INDEX_ROUTING_ALLOCATION_ENABLE= "index.routing.allocation.enable"; - public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), 
Rebalance::parse, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.CLUSTER); public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable"; private volatile Rebalance enableRebalance; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index 87ec158e8ca..4c451e7fffa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -64,9 +64,9 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String INDEX_ROUTING_INCLUDE_GROUP = "index.routing.allocation.include."; public static final String INDEX_ROUTING_EXCLUDE_GROUP = "index.routing.allocation.exclude."; - public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.CLUSTER); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 3bc02879e61..9149d04cf60 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -64,7 +64,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. 
*/ - public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.CLUSTER); @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index fdc65bf7ac8..597f0add8da 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -39,7 +39,7 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { /** * Disables relocation of shards that are currently being snapshotted. */ - public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, Setting.Scope.CLUSTER); private volatile boolean enableRelocation = false; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index f7369f35dd9..9e3d96b4e18 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -52,8 +52,8 @@ public class ThrottlingAllocationDecider extends AllocationDecider { public static final String NAME = "throttling"; public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, true, Setting.Scope.Cluster); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.Cluster); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.CLUSTER); 
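A hedged sketch of the fallback-default pattern used in the node_concurrent_recoveries declaration above (not from the patch; the keys "example.new_key" and "example.legacy_key" are hypothetical): the default function reads another key at evaluation time, so the new setting inherits a legacy value until the new key is set explicitly.

    public static final Setting<Integer> EXAMPLE_NEW_SETTING =
        new Setting<>("example.new_key",
            (s) -> s.get("example.legacy_key", "2"), // fall back to the legacy key, then to "2"
            Integer::parseInt, true, Setting.Scope.CLUSTER);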
private volatile int primariesInitialRecoveries; private volatile int concurrentRecoveries; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 945b9d1ea59..09f15994848 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -63,7 +63,7 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF */ public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { - public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); + public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); public static final String SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL = "cluster.service.reconnect_interval"; public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index bf9c24dad8d..602081a1d47 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -45,7 +45,7 @@ import java.util.*; public final class ClusterSettings extends AbstractScopedSettings { public ClusterSettings(Settings settings, Set> settingsSet) { - super(settings, settingsSet, Setting.Scope.Cluster); + super(settings, settingsSet, Setting.Scope.CLUSTER); } /** diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 8b3227d7b9f..3f182885151 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -145,8 +145,8 @@ public class Setting extends ToXContentToBytes { * The settings scope - settings can either be cluster settings or per index settings. 
*/ public enum Scope { - Cluster, - Index; + CLUSTER, + INDEX; } AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 647d9e02963..8bc8ce1b651 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -55,13 +55,13 @@ public class SettingsModule extends AbstractModule { public void registerSetting(Setting setting) { switch (setting.getScope()) { - case Cluster: + case CLUSTER: if (clusterDynamicSettings.containsKey(setting.getKey())) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } clusterDynamicSettings.put(setting.getKey(), setting); break; - case Index: + case INDEX: throw new UnsupportedOperationException("not yet implemented"); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 26902a70e58..519a2330353 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -40,15 +40,15 @@ public class DiscoverySettings extends AbstractComponent { * sets the timeout for a complete publishing cycle, including both sending and committing. the master * will continue to process the next cluster state update after this time has elapsed **/ - public static final Setting PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.Cluster); + public static final Setting PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing * to receive responses within this window will cause the cluster state change to be rejected.
*/ - public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.Cluster); - public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.Cluster); - public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.Cluster); + public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.CLUSTER); + public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.CLUSTER); + public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.CLUSTER); public final static int NO_MASTER_BLOCK_ID = 2; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 441f6403d1d..d69227d8f0b 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -75,7 +75,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public class ZenDiscovery extends AbstractLifecycleComponent implements Discovery, PingContextProvider { - public final static Setting REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.Cluster); + public final static Setting REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.CLUSTER); public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout"; public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout"; public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts"; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index 3ba338b4070..9cca1edfc5e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -40,7 +40,7 @@ import java.util.List; */ public class ElectMasterService extends AbstractComponent { - public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, Setting.Scope.Cluster); + public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, Setting.Scope.CLUSTER); // This is the minimum version a master needs to be on, otherwise it gets ignored // This is based on the minimum compatible version of the current version this node is on diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 
0588dafd310..ed561876735 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -36,11 +36,11 @@ public class IndexStoreConfig{ /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ - public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.Cluster); + public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.CLUSTER); /** * Configures the node / cluster level throttle intensity. The default is 10240 MB */ - public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.Cluster); + public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.CLUSTER); private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index fc80495bb1b..4c2a2ced09e 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -46,14 +46,14 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final ConcurrentMap breakers = new ConcurrentHashMap(); - public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.Cluster); + public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.CLUSTER); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.Cluster); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.Cluster); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.CLUSTER); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.CLUSTER); public static final String FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.fielddata.type"; - public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.Cluster); - public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.Cluster); + public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = 
Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.CLUSTER); + public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.CLUSTER); public static final String REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.request.type"; public static final String DEFAULT_BREAKER_TYPE = "memory"; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 0e72dedb3ad..c07ef57810a 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -40,37 +40,37 @@ import java.util.concurrent.TimeUnit; */ public class RecoverySettings extends AbstractComponent implements Closeable { - public static final Setting INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.file_chunk_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_TRANSLOG_OPS_SETTING = Setting.intSetting("indices.recovery.translog_ops", 1000, true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_TRANSLOG_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.translog_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_COMPRESS_SETTING = Setting.boolSetting("indices.recovery.compress", true, true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_streams", 3, true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_small_file_streams", 2, true, Setting.Scope.Cluster); - public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.file_chunk_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_TRANSLOG_OPS_SETTING = Setting.intSetting("indices.recovery.translog_ops", 1000, true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_TRANSLOG_SIZE_SETTING = Setting.byteSizeSetting("indices.recovery.translog_size", new ByteSizeValue(512, ByteSizeUnit.KB), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_COMPRESS_SETTING = Setting.boolSetting("indices.recovery.compress", true, true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_streams", 3, true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_small_file_streams", 2, true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.CLUSTER); /** * how long to wait before retrying after issues cause by cluster 
state syncing between nodes * i.e., local node is not yet known on remote node, remote shard not yet started etc. */ - public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, Setting.Scope.CLUSTER); /** how long to wait before retrying after network related issues */ - public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, Setting.Scope.CLUSTER); /** timeout value to use for requests made as part of the recovery process */ - public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, Setting.Scope.CLUSTER); /** * timeout value to use for requests made as part of the recovery process that are expected to take a long time. * defaults to twice `indices.recovery.internal_action_timeout`. */ - public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), TimeValue.timeValueSeconds(0), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); /** * recoveries that don't show any activity for more than this interval will be failed.
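* A minimal sketch of the derived-default pattern used by {@code internal_action_long_timeout}
* above, where the default is a {@code Function<Settings, String>} evaluated against the
* current settings (the {@code my.*} names below are invented for illustration):
* {@code Setting.timeSetting("my.long_timeout", (s) ->
* TimeValue.timeValueMillis(MY_BASE_TIMEOUT_SETTING.get(s).millis() * 2).toString(),
* TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER)};
* the resulting default tracks whatever {@code MY_BASE_TIMEOUT_SETTING} resolves to.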
* defaults to `indices.recovery.internal_action_long_timeout` */ - public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, Setting.Scope.Cluster); + public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes(); diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 454dc460274..0eed82561a3 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -67,7 +67,7 @@ import java.util.concurrent.locks.ReentrantLock; */ public class IndicesTTLService extends AbstractLifecycleComponent { - public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.Cluster); + public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge"; private final ClusterService clusterService; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index a41a97e5684..c416ec469de 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -112,7 +112,7 @@ public class SearchService extends AbstractLifecycleComponent imp public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval"; public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); - public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.Cluster); + public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER); private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 870ae387907..935fa2231cb 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -169,7 +169,7 @@ public class ThreadPool extends AbstractComponent { } } - public static final Setting THREADPOOL_GROUP_SETTING = Setting.groupSetting("threadpool.", true, Setting.Scope.Cluster); + public static final Setting THREADPOOL_GROUP_SETTING = Setting.groupSetting("threadpool.", true, Setting.Scope.CLUSTER); private volatile Map executors; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 82802e99dfa..05d5242ac82 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ 
b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -87,8 +87,8 @@ public class TransportService extends AbstractLifecycleComponent TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "", Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster); - public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.Cluster);; + public static final Setting TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "", Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER); + public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER);; private final ESLogger tracerLog; diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 2b77c3c7514..48b2591559e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -84,7 +84,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSetting() { final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); - module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster)); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER)); assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index bfc91bc92a3..9c5320b1354 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -31,8 +31,8 @@ import java.util.concurrent.atomic.AtomicInteger; public class ScopedSettingsTests extends ESTestCase { public void testAddConsumer() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); AtomicInteger consumer = new AtomicInteger(); @@ -59,8 +59,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testApply() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.Cluster); + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); AtomicInteger consumer = new AtomicInteger(); @@ -120,15 
+120,15 @@ public class ScopedSettingsTests extends ESTestCase { } public void testIsDynamic(){ - ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster)))); + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER)))); assertFalse(settings.hasDynamicSetting("foo.bar.baz")); assertTrue(settings.hasDynamicSetting("foo.bar")); assertNotNull(settings.get("foo.bar.baz")); } public void testDiff() throws IOException { - Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.Cluster); - Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.Cluster); + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER); + Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(diff.getAsMap().size(), 1); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 5f6f2b3615c..3f048e454a5 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -30,14 +30,14 @@ public class SettingTests extends ESTestCase { public void testGet() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); assertFalse(booleanSetting.get(Settings.EMPTY)); assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); } public void testByteSize() { - Setting byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, Setting.Scope.Cluster); + Setting byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, Setting.Scope.CLUSTER); assertFalse(byteSizeValueSetting.isGroupSetting()); ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); @@ -56,7 +56,7 @@ public class SettingTests extends ESTestCase { } public void testSimpleUpdate() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference atomicBoolean = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); Settings build = Settings.builder().put("foo.bar", false).build(); @@ -81,7 +81,7 @@ public class SettingTests extends ESTestCase { } public void testUpdateNotDynamic() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, Setting.Scope.Cluster); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, Setting.Scope.CLUSTER); assertFalse(booleanSetting.isGroupSetting()); AtomicReference atomicBoolean = new 
AtomicReference<>(null); try { @@ -93,7 +93,7 @@ public class SettingTests extends ESTestCase { } public void testUpdaterIsIsolated() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.Cluster); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger, Settings.EMPTY); @@ -107,20 +107,20 @@ public class SettingTests extends ESTestCase { public void testDefault() { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); - Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.Cluster); + Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.CLUSTER); assertFalse(setting.isGroupSetting()); String aDefault = setting.getDefault(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); - Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.Cluster); + Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.Cluster); + Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.CLUSTER); assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); @@ -144,7 +144,7 @@ public class SettingTests extends ESTestCase { } public void testRollback() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Cluster); + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); assertFalse(integerSetting.isGroupSetting()); AtomicReference ref = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = integerSetting.newUpdater(ref::set, logger, Settings.EMPTY); @@ -159,15 +159,15 @@ public class SettingTests extends ESTestCase { } public void testType() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Cluster); - assertEquals(integerSetting.getScope(), Setting.Scope.Cluster); - integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.Index); - assertEquals(integerSetting.getScope(), Setting.Scope.Index); + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); + assertEquals(integerSetting.getScope(), Setting.Scope.CLUSTER); + integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.INDEX); + assertEquals(integerSetting.getScope(), Setting.Scope.INDEX); } public void testGroups() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.Cluster); + Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); assertTrue(setting.isGroupSetting()); 
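// A hedged sketch of what the group setting above captures, using this test's keys;
// the behavior is inferred from the getAsGroups() assertions below:
//   input:    foo.bar.1.value = "1", foo.bar.2.value = "2", foo.bar.3.value = "3"
//   captured: one Settings object holding every key under the "foo.bar." prefix,
//             readable as groups "1" -> {value=1}, "2" -> {value=2}, "3" -> {value=3}.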
ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); @@ -241,8 +241,8 @@ public class SettingTests extends ESTestCase { public void testComposite() { Composite c = new Composite(); - Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.Cluster); - Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.Cluster); + Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.CLUSTER); + Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.CLUSTER); ClusterSettings.SettingUpdater settingUpdater = Setting.compoundUpdater(c::set, a, b, logger, Settings.EMPTY); assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); settingUpdater.apply(); From 8e0a610c263fc49504b1c41bb230afd1043941d1 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 12:24:20 +0100 Subject: [PATCH 017/322] add simple javadoc reference to usage --- .../common/settings/Setting.java | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 3f182885151..0c0380d24e6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -56,21 +56,21 @@ public class Setting extends ToXContentToBytes { * Returns the settings key or a prefix if this setting is a group setting * @see #isGroupSetting() */ - public String getKey() { + public final String getKey() { return key; } /** * Returns true iff this setting is dynamically updateable, otherwise false */ - public boolean isDynamic() { + public final boolean isDynamic() { return dynamic; } /** * Returns the settings scope */ - public Scope getScope() { + public final Scope getScope() { return scope; } @@ -87,14 +87,14 @@ public class Setting extends ToXContentToBytes { * Returns the default values string representation for this setting. * @param settings a settings object for settings that has a default value depending on another setting if available */ - public String getDefault(Settings settings) { + public final String getDefault(Settings settings) { return defaultValue.apply(settings); } /** * Returns true iff this setting is present in the given settings object. Otherwise false */ - public boolean exists(Settings settings) { + public final boolean exists(Settings settings) { return settings.get(key) != null; } @@ -117,7 +117,7 @@ public class Setting extends ToXContentToBytes { * Returns the raw (string) settings value. If the setting is not present in the given settings object the default value is returned * instead. This is useful if the value can't be parsed due to an invalid value to access the actual value. 
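* A short illustration borrowing the {@code a.byte.size} setting from SettingTests in this
* series (behavior per that test's assertions): for a stored value of "12", {@code get()}
* fails with "unit is missing or unrecognized", while {@code getRaw()} still returns the
* string "12", which an error message can then report.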
*/ - public String getRaw(Settings settings) { + public final String getRaw(Settings settings) { return settings.get(key, defaultValue.apply(settings)); } @@ -131,7 +131,7 @@ public class Setting extends ToXContentToBytes { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("key", key); builder.field("type", scope.name()); @@ -149,7 +149,7 @@ public class Setting extends ToXContentToBytes { INDEX; } - AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { + final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { return newUpdater(consumer, logger, settings, (s) -> {}); } @@ -161,6 +161,10 @@ public class Setting extends ToXContentToBytes { } } + /** + * this is used for settings that depend on each other... see {@link org.elasticsearch.common.settings.AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)} and its + * usage for details. + */ static AbstractScopedSettings.SettingUpdater compoundUpdater(final BiConsumer consumer, final Setting
aSettting, final Setting bSetting, ESLogger logger, Settings settings) { final AtomicReference aRef = new AtomicReference<>(); final AtomicReference bRef = new AtomicReference<>(); From ce417540c5f51361699b2cbcb81277e702efc9e6 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 12:24:40 +0100 Subject: [PATCH 018/322] apply review from @clintongormley --- docs/reference/cluster/update-settings.asciidoc | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 2106bdf53b0..8ec58424730 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -62,6 +62,19 @@ the response for the last example will be: "persistent" : {}, "transient" : {} } +-------------------------------------------------- + +Settings can also be reset using simple wildcards. For instance to reset +all dynamic `discovery.zen` setting a prefix can be used: + +[source,js] +-------------------------------------------------- +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "discovery.zen.*" : null + } +}' +-------------------------------------------------- Cluster wide settings can be returned using: From a49120bfc1780c8a1fab432e99ea069328b84f16 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 12:26:28 +0100 Subject: [PATCH 019/322] fix compilation --- .../java/org/elasticsearch/discovery/ec2/Ec2Discovery.java | 5 +++-- .../java/org/elasticsearch/discovery/gce/GceDiscovery.java | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java index 3fd3f1948e8..aa3cef01d03 100755 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java @@ -22,6 +22,7 @@ package org.elasticsearch.discovery.ec2; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -39,10 +40,10 @@ public class Ec2Discovery extends ZenDiscovery { @Inject public Ec2Discovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java index c1a00979fcd..fe87b9244d4 100755 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java +++ 
b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java @@ -22,6 +22,7 @@ package org.elasticsearch.discovery.gce; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -39,10 +40,10 @@ public class GceDiscovery extends ZenDiscovery { @Inject public GceDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, ClusterSettingsService clusterSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, clusterSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } From 3b85dbb8c806ac1ac3e5e8ce5551d68acc7bce4f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Dec 2015 13:42:35 +0100 Subject: [PATCH 020/322] use raw value directly --- .../main/java/org/elasticsearch/common/settings/Setting.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 0c0380d24e6..d82a8cb1cd5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -226,10 +226,7 @@ public class Setting extends ToXContentToBytes { public boolean prepareApply(Settings settings) { - String newValue = settings.get(key); - if (newValue == null) { - newValue = getRaw(settings); - } + final String newValue = getRaw(settings); if (value.equals(newValue) == false) { T inst = get(settings); try { From 8011e18880c071db32473bc1789dd054619d7233 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 10 Dec 2015 16:45:11 +0100 Subject: [PATCH 021/322] apply simple comments --- .../allocator/BalancedShardsAllocator.java | 7 +++--- .../settings/AbstractScopedSettings.java | 16 -------------- .../common/settings/ClusterSettings.java | 22 +++++++++++++++++++ .../common/settings/Setting.java | 1 + .../discovery/DiscoverySettings.java | 8 +++---- 5 files changed, 29 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index da73cdc1455..e6dc9a65efd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -87,18 +87,17 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards @Inject public BalancedShardsAllocator(Settings settings, ClusterSettings clusterSettings) { super(settings); - weightFunction = new WeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); + 
setWeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); setThreshold(THRESHOLD_SETTING.get(settings)); clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); clusterSettings.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); } - public void setWeightFunction(float indexBalance, float shardBalanceFactor) { + private void setWeightFunction(float indexBalance, float shardBalanceFactor) { weightFunction = new WeightFunction(indexBalance, shardBalanceFactor); } - - public void setThreshold(float threshold) { + private void setThreshold(float threshold) { this.threshold = threshold; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index af82f97bf0c..7fc8cebd31b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -128,22 +128,6 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } } } - - try { - for (Map.Entry entry : newSettings.getAsMap().entrySet()) { - if (entry.getKey().startsWith("logger.")) { - String component = entry.getKey().substring("logger.".length()); - if ("_root".equals(component)) { - ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); - } else { - ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); - } - } - } - } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, "logger"); - } - return lastSettingsApplied = newSettings; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 602081a1d47..e7b7eb9960d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.*; import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; @@ -48,6 +49,27 @@ public final class ClusterSettings extends AbstractScopedSettings { super(settings, settingsSet, Setting.Scope.CLUSTER); } + + @Override + public synchronized Settings applySettings(Settings newSettings) { + Settings settings = super.applySettings(newSettings); + try { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + if (entry.getKey().startsWith("logger.")) { + String component = entry.getKey().substring("logger.".length()); + if ("_root".equals(component)) { + ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); + } else { + ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); + } + } + } + } catch (Exception e) { + logger.warn("failed to refresh settings for [{}]", e, "logger"); + } + return settings; + } + /** * Returns true if the settings is a logger setting. 
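* A hedged end-to-end example in the curl style of PATCH 018, assuming logger keys pass
* the strict validation (which this special-casing exists to support):
* {@code curl -XPUT localhost:9200/_cluster/settings -d '{"transient" : {"logger._root" : "DEBUG"}}'}
* Per applySettings above, {@code _root} targets the root logger, while any other suffix
* names the component whose log level should change.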
*/ diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index d82a8cb1cd5..1f4e8ed04b1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -45,6 +45,7 @@ public class Setting extends ToXContentToBytes { private final Scope scope; public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; this.parser = parser; diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 519a2330353..6689d9c8688 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -36,6 +36,9 @@ import java.util.EnumSet; */ public class DiscoverySettings extends AbstractComponent { + public final static int NO_MASTER_BLOCK_ID = 2; + public final static ClusterBlock NO_MASTER_BLOCK_ALL = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); + public final static ClusterBlock NO_MASTER_BLOCK_WRITES = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, false, RestStatus.SERVICE_UNAVAILABLE, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); /** * sets the timeout for a complete publishing cycle, including both sending and committing. the master * will continue to process the next cluster state update after this time has elapsed @@ -50,11 +53,6 @@ public class DiscoverySettings extends AbstractComponent { public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.CLUSTER); public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.CLUSTER); - public final static int NO_MASTER_BLOCK_ID = 2; - - public final static ClusterBlock NO_MASTER_BLOCK_ALL = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); - public final static ClusterBlock NO_MASTER_BLOCK_WRITES = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, false, RestStatus.SERVICE_UNAVAILABLE, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); - private volatile ClusterBlock noMasterBlock; private volatile TimeValue publishTimeout; From 86a18a08fb5b3d8b4f000c71b0f69acea80b0b6f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 10 Dec 2015 18:01:00 +0100 Subject: [PATCH 022/322] Make SettingsUpdater less stateful --- .../settings/AbstractScopedSettings.java | 111 +++++++------ .../common/settings/Setting.java | 157 +++++++----------- .../common/settings/SettingTests.java | 103 +++++------- 3 files changed, 159 insertions(+), 212 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 7fc8cebd31b..e3de252e083 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++
b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -32,7 +32,7 @@ import java.util.function.Consumer; * This service offers transactional application of updates settings. */ public abstract class AbstractScopedSettings extends AbstractComponent { - private Settings lastSettingsApplied; + private Settings lastSettingsApplied = Settings.EMPTY; private final List settingUpdaters = new ArrayList<>(); private final Map> groupSettings = new HashMap<>(); private final Map> keySettings = new HashMap<>(); @@ -62,29 +62,22 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * method will not change any settings but will fail if any of the settings can't be applied. */ public synchronized Settings dryRun(Settings settings) { - final Settings build = Settings.builder().put(this.settings).put(settings).build(); - try { - List exceptions = new ArrayList<>(); - for (SettingUpdater settingUpdater : settingUpdaters) { - try { - settingUpdater.prepareApply(build); - } catch (RuntimeException ex) { - exceptions.add(ex); - logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater); - } - } - // here we are exhaustive and record all settings that failed. - ExceptionsHelper.rethrowAndSuppress(exceptions); - } finally { - for (SettingUpdater settingUpdater : settingUpdaters) { - try { - settingUpdater.rollback(); - } catch (Exception e) { - logger.error("failed to rollback settings for [{}]", e, settingUpdater); + final Settings current = Settings.builder().put(this.settings).put(settings).build(); + final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); + List exceptions = new ArrayList<>(); + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + if (settingUpdater.hasChanged(current, previous)) { + settingUpdater.getValue(current, previous); } + } catch (RuntimeException ex) { + exceptions.add(ex); + logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater); } } - return build; + // here we are exhaustive and record all settings that failed. 
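// A hedged usage sketch of this validate-only path; the registration mirrors
// ScopedSettingsTests earlier in this series and the bogus value is illustrative:
//   ClusterSettings service = new ClusterSettings(Settings.EMPTY,
//       Collections.singleton(Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER)));
//   service.dryRun(Settings.builder().put("foo.bar", "not-a-number").build());
//   // throws, with every failing setting attached as a suppressed exception;
//   // no value is applied either way.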
+ ExceptionsHelper.rethrowAndSuppress(exceptions); + return current; } /** @@ -99,34 +92,25 @@ public abstract class AbstractScopedSettings extends AbstractComponent { // nothing changed in the settings, ignore return newSettings; } - final Settings build = Settings.builder().put(this.settings).put(newSettings).build(); - boolean success = false; + final Settings current = Settings.builder().put(this.settings).put(newSettings).build(); + final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); try { + List applyRunnables = new ArrayList<>(); for (SettingUpdater settingUpdater : settingUpdaters) { try { - settingUpdater.prepareApply(build); + applyRunnables.add(settingUpdater.updater(current, previous)); } catch (Exception ex) { logger.warn("failed to prepareCommit settings for [{}]", ex, settingUpdater); throw ex; } } - for (SettingUpdater settingUpdater : settingUpdaters) { - settingUpdater.apply(); + for (Runnable settingUpdater : applyRunnables) { + settingUpdater.run(); } - success = true; } catch (Exception ex) { logger.warn("failed to apply settings", ex); throw ex; } finally { - if (success == false) { - for (SettingUpdater settingUpdater : settingUpdaters) { - try { - settingUpdater.rollback(); - } catch (Exception e) { - logger.error("failed to refresh settings for [{}]", e, settingUpdater); - } - } - } } return lastSettingsApplied = newSettings; } @@ -141,7 +125,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { if (setting != get(setting.getKey())) { throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); } - this.settingUpdaters.add(setting.newUpdater(consumer, logger, settings, predicate)); + this.settingUpdaters.add(setting.newUpdater(consumer, logger, predicate)); } /** @@ -159,7 +143,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { if (b != get(b.getKey())) { throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]"); } - this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger, settings)); + this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger)); } /** @@ -176,24 +160,51 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Transactional interface to update settings. * @see Setting */ - public interface SettingUpdater { - /** - * Prepares applying the given settings to this updater. All the heavy lifting like parsing and validation - * happens in this method. Yet the actual setting should not be changed by this call. - * @param settings the settings to apply - * @return true if this updater will update a setting on calling {@link #apply()} otherwise false - */ - boolean prepareApply(Settings settings); + public interface SettingUpdater { /** - * Applies the settings passed to {@link #prepareApply(Settings)} + * Returns true if this updater's setting has changed with the current update + * @param current the current settings + * @param previous the previous settings + * @return true if this updater's setting has changed with the current update */ - void apply(); + boolean hasChanged(Settings current, Settings previous); /** - * Rolls back to the state before {@link #prepareApply(Settings)} was called. All internal prepared state is cleared after this call. + * Returns the instance value for the current settings. This method is stateless and idempotent.
*/ - void rollback(); + T getValue(Settings current, Settings previous); + + /** + * Applies the given value to the updater. This method will actually run the update. + */ + void apply(T value, Settings current, Settings previous); + + /** + * Updates this updater's value if it has changed. + * @return true iff the value has been updated. + */ + default boolean apply(Settings current, Settings previous) { + if (hasChanged(current, previous)) { + T value = getValue(current, previous); + apply(value, current, previous); + return true; + } + return false; + } + + /** + * Returns a callable runnable that calls {@link #apply(Object, Settings, Settings)} if the settings + * actually changed. This allows deferring the update to a later point in time while keeping type safety. + * If the value didn't change the returned runnable is a noop. + */ + default Runnable updater(Settings current, Settings previous) { + if (hasChanged(current, previous)) { + T value = getValue(current, previous); + return () -> { apply(value, current, previous);}; + } + return () -> {}; + } } /** diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 1f4e8ed04b1..b164c906e7a 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; @@ -150,13 +151,13 @@ public class Setting extends ToXContentToBytes { INDEX; } - final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings) { - return newUpdater(consumer, logger, settings, (s) -> {}); + final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger) { + return newUpdater(consumer, logger, (s) -> {}); } - AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { + AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer accept) { if (isDynamic()) { - return new Updater(consumer, logger, settings, accept); + return new Updater(consumer, logger, accept); } else { throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); } @@ -166,39 +167,23 @@ public class Setting extends ToXContentToBytes { * this is used for settings that depend on each other... see {@link org.elasticsearch.common.settings.AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)} and its * usage for details.
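* A wiring sketch grounded in PATCH 021's BalancedShardsAllocator:
* {@code clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING,
* SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction)} registers one BiConsumer that
* fires whenever either setting changes; the compound updater below hands it both current
* values as a single pair.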
*/ - static AbstractScopedSettings.SettingUpdater compoundUpdater(final BiConsumer consumer, final Setting aSettting, final Setting bSetting, ESLogger logger, Settings settings) { - final AtomicReference aRef = new AtomicReference<>(); - final AtomicReference bRef = new AtomicReference<>(); - final AbstractScopedSettings.SettingUpdater aSettingUpdater = aSettting.newUpdater(aRef::set, logger, settings); - final AbstractScopedSettings.SettingUpdater bSettingUpdater = bSetting.newUpdater(bRef::set, logger, settings); - return new AbstractScopedSettings.SettingUpdater() { - boolean aHasChanged = false; - boolean bHasChanged = false; + static AbstractScopedSettings.SettingUpdater> compoundUpdater(final BiConsumer consumer, final Setting aSettting, final Setting bSetting, ESLogger logger) { + final AbstractScopedSettings.SettingUpdater aSettingUpdater = aSettting.newUpdater(null, logger); + final AbstractScopedSettings.SettingUpdater bSettingUpdater = bSetting.newUpdater(null, logger); + return new AbstractScopedSettings.SettingUpdater>() { @Override - public boolean prepareApply(Settings settings) { - aHasChanged = aSettingUpdater.prepareApply(settings); - bHasChanged = bSettingUpdater.prepareApply(settings); - return aHasChanged || bHasChanged; + public boolean hasChanged(Settings current, Settings previous) { + return aSettingUpdater.hasChanged(current, previous) || bSettingUpdater.hasChanged(current, previous); } @Override - public void apply() { - aSettingUpdater.apply(); - bSettingUpdater.apply(); - if (aHasChanged || bHasChanged) { - consumer.accept(aRef.get(), bRef.get()); - } + public Tuple getValue(Settings current, Settings previous) { + return new Tuple<>(aSettingUpdater.getValue(current, previous), bSettingUpdater.getValue(current, previous)); } @Override - public void rollback() { - try { - aRef.set(null); - aSettingUpdater.rollback(); - } finally { - bRef.set(null); - bSettingUpdater.rollback(); - } + public void apply(Tuple value, Settings current, Settings previous) { + consumer.accept(value.v1(), value.v2()); } @Override @@ -209,63 +194,47 @@ public class Setting extends ToXContentToBytes { } - private class Updater implements AbstractScopedSettings.SettingUpdater { + private class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final ESLogger logger; private final Consumer accept; - private String value; - private boolean commitPending; - private String pendingValue; - private T valueInstance; - public Updater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { + public Updater(Consumer consumer, ESLogger logger, Consumer accept) { this.consumer = consumer; this.logger = logger; - value = getRaw(settings); this.accept = accept; } - - public boolean prepareApply(Settings settings) { - final String newValue = getRaw(settings); - if (value.equals(newValue) == false) { - T inst = get(settings); - try { - accept.accept(inst); - } catch (Exception | AssertionError e) { - throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + getRaw(settings) + "]", e); - } - pendingValue = newValue; - valueInstance = inst; - commitPending = true; - - } else { - commitPending = false; - } - return commitPending; - } - - public void apply() { - if (commitPending) { - logger.info("update [{}] from [{}] to [{}]", key, value, pendingValue); - value = pendingValue; - consumer.accept(valueInstance); - } - commitPending = false; - valueInstance = null; - pendingValue = null; - } - - 
public void rollback() { - commitPending = false; - valueInstance = null; - pendingValue = null; - } - @Override public String toString() { return "Updater for: " + Setting.this.toString(); } + + @Override + public boolean hasChanged(Settings current, Settings previous) { + final String newValue = getRaw(current); + final String value = getRaw(previous); + return value.equals(newValue) == false; + } + + @Override + public T getValue(Settings current, Settings previous) { + final String newValue = getRaw(current); + final String value = getRaw(previous); + T inst = get(current); + try { + accept.accept(inst); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + newValue + "]", e); + } + return inst; + } + + @Override + public void apply(T value, Settings current, Settings previous) { + logger.info("update [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); + consumer.accept(value); + } } @@ -329,43 +298,35 @@ public class Setting extends ToXContentToBytes { } @Override - public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Settings settings, Consumer accept) { + public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer accept) { if (isDynamic() == false) { throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); } final Setting setting = this; - return new AbstractScopedSettings.SettingUpdater() { - private Settings pendingSettings; - private Settings committedSettings = get(settings); + return new AbstractScopedSettings.SettingUpdater() { @Override - public boolean prepareApply(Settings settings) { - Settings currentSettings = get(settings); - if (currentSettings.equals(committedSettings) == false) { - try { - accept.accept(currentSettings); - } catch (Exception | AssertionError e) { - throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + committedSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e); - } - pendingSettings = currentSettings; - return true; - } else { - return false; - } + public boolean hasChanged(Settings current, Settings previous) { + Settings currentSettings = get(current); + Settings previousSettings = get(previous); + return currentSettings.equals(previousSettings) == false; } @Override - public void apply() { - if (pendingSettings != null) { - consumer.accept(pendingSettings); - committedSettings = pendingSettings; + public Settings getValue(Settings current, Settings previous) { + Settings currentSettings = get(current); + Settings previousSettings = get(previous); + try { + accept.accept(currentSettings); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + previousSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e); } - pendingSettings = null; + return currentSettings; } @Override - public void rollback() { - pendingSettings = null; + public void apply(Settings value, Settings current, Settings previous) { + consumer.accept(value); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 3f048e454a5..d8ac616eccb 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -19,6 +19,7 @@ 
package org.elasticsearch.common.settings; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -42,38 +43,33 @@ public class SettingTests extends ESTestCase { ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); AtomicReference value = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); try { - settingUpdater.prepareApply(Settings.builder().put("a.byte.size", 12).build()); + settingUpdater.apply(Settings.builder().put("a.byte.size", 12).build(), Settings.EMPTY); fail("no unit"); } catch (ElasticsearchParseException ex) { assertEquals("failed to parse setting [a.byte.size] with value [12] as a size in bytes: unit is missing or unrecognized", ex.getMessage()); } - assertTrue(settingUpdater.prepareApply(Settings.builder().put("a.byte.size", "12b").build())); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(Settings.builder().put("a.byte.size", "12b").build(), Settings.EMPTY)); assertEquals(new ByteSizeValue(12), value.get()); } public void testSimpleUpdate() { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference atomicBoolean = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); - settingUpdater.prepareApply(build); - assertNull(atomicBoolean.get()); - settingUpdater.rollback(); + settingUpdater.apply(build, Settings.EMPTY); assertNull(atomicBoolean.get()); build = Settings.builder().put("foo.bar", true).build(); - settingUpdater.prepareApply(build); - assertNull(atomicBoolean.get()); - settingUpdater.apply(); + settingUpdater.apply(build, Settings.EMPTY); assertTrue(atomicBoolean.get()); // try update bogus value build = Settings.builder().put("foo.bar", "I am not a boolean").build(); try { - settingUpdater.prepareApply(build); + settingUpdater.apply(build, Settings.EMPTY); fail("not a boolean"); } catch (IllegalArgumentException ex) { assertEquals("Failed to parse value [I am not a boolean] for setting [foo.bar]", ex.getMessage()); @@ -85,7 +81,7 @@ public class SettingTests extends ESTestCase { assertFalse(booleanSetting.isGroupSetting()); AtomicReference atomicBoolean = new AtomicReference<>(null); try { - booleanSetting.newUpdater(atomicBoolean::set, logger, Settings.EMPTY); + booleanSetting.newUpdater(atomicBoolean::set, logger); fail("not dynamic"); } catch (IllegalStateException ex) { assertEquals("setting [foo.bar] is not dynamic", ex.getMessage()); @@ -96,11 +92,9 @@ public class SettingTests extends ESTestCase { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger, Settings.EMPTY); - settingUpdater.prepareApply(Settings.builder().put("foo.bar", true).build()); - 
assertNull(ab1.get()); - assertNull(ab2.get()); - settingUpdater.apply(); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); + ClusterSettings.SettingUpdater settingUpdater2 = booleanSetting.newUpdater(ab2::set, logger); + settingUpdater.apply(Settings.builder().put("foo.bar", true).build(), Settings.EMPTY); assertTrue(ab1.get()); assertNull(ab2.get()); } @@ -124,40 +118,22 @@ public class SettingTests extends ESTestCase { assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); - assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); - settingUpdater.apply(); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertSame("no update - type has not changed", type, ref.get()); // change from default - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar", "2").build())); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(Settings.builder().put("foo.bar", "2").build(), Settings.EMPTY)); assertNotSame("update - type has changed", type, ref.get()); assertEquals("2", ref.get().foo); // change back to default... - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.baz", "2").build())); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(Settings.EMPTY, Settings.builder().put("foo.bar", "2").build())); assertNotSame("update - type has changed", type, ref.get()); assertEquals("", ref.get().foo); } - public void testRollback() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); - assertFalse(integerSetting.isGroupSetting()); - AtomicReference ref = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = integerSetting.newUpdater(ref::set, logger, Settings.EMPTY); - assertNull(ref.get()); - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.int.bar", "2").build())); - settingUpdater.rollback(); - settingUpdater.apply(); - assertNull(ref.get()); - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.int.bar", "2").build())); - settingUpdater.apply(); - assertEquals(2, ref.get().intValue()); - } - public void testType() { Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); assertEquals(integerSetting.getScope(), Setting.Scope.CLUSTER); @@ -169,10 +145,11 @@ public class SettingTests extends ESTestCase { AtomicReference ref = new AtomicReference<>(null); Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); assertTrue(setting.isGroupSetting()); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build())); - settingUpdater.apply(); + Settings currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); + Settings previousInput = Settings.EMPTY; + assertTrue(settingUpdater.apply(currentInput, previousInput)); assertNotNull(ref.get()); Settings settings = ref.get(); Map asMap = settings.getAsGroups(); @@ -181,14 +158,16 @@ public class 
SettingTests extends ESTestCase { assertEquals(asMap.get("2").get("value"), "2"); assertEquals(asMap.get("3").get("value"), "3"); + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); Settings current = ref.get(); - assertFalse(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build())); - settingUpdater.apply(); + assertFalse(settingUpdater.apply(currentInput, previousInput)); assertSame(current, ref.get()); + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(); // now update and check that we got it - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build())); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(currentInput, previousInput)); assertNotSame(current, ref.get()); asMap = ref.get().getAsGroups(); @@ -196,9 +175,10 @@ public class SettingTests extends ESTestCase { assertEquals(asMap.get("1").get("value"), "1"); assertEquals(asMap.get("2").get("value"), "2"); + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "4").build(); // now update and check that we got it - assertTrue(settingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "4").build())); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(currentInput, previousInput)); assertNotSame(current, ref.get()); asMap = ref.get().getAsGroups(); @@ -209,9 +189,9 @@ public class SettingTests extends ESTestCase { assertTrue(setting.match("foo.bar.baz")); assertFalse(setting.match("foo.baz.bar")); - ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger, Settings.EMPTY, (s) -> assertFalse(true)); + ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); try { - predicateSettingUpdater.prepareApply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build()); + predicateSettingUpdater.apply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(), Settings.EMPTY); fail("not accepted"); } catch (IllegalArgumentException ex) { assertEquals(ex.getMessage(), "illegal value can't update [foo.bar.] 
from [{}] to [{1.value=1, 2.value=2}]"); @@ -243,32 +223,27 @@ public class SettingTests extends ESTestCase { Composite c = new Composite(); Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.CLUSTER); Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.CLUSTER); - ClusterSettings.SettingUpdater settingUpdater = Setting.compoundUpdater(c::set, a, b, logger, Settings.EMPTY); - assertFalse(settingUpdater.prepareApply(Settings.EMPTY)); - settingUpdater.apply(); + ClusterSettings.SettingUpdater> settingUpdater = Setting.compoundUpdater(c::set, a, b, logger); + assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertNull(c.a); assertNull(c.b); Settings build = Settings.builder().put("foo.int.bar.a", 2).build(); - assertTrue(settingUpdater.prepareApply(build)); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(build, Settings.EMPTY)); assertEquals(2, c.a.intValue()); - assertNull(c.b); + assertEquals(1, c.b.intValue()); Integer aValue = c.a; - assertFalse(settingUpdater.prepareApply(build)); - settingUpdater.apply(); + assertFalse(settingUpdater.apply(build, build)); assertSame(aValue, c.a); - + Settings previous = build; build = Settings.builder().put("foo.int.bar.a", 2).put("foo.int.bar.b", 5).build(); - assertTrue(settingUpdater.prepareApply(build)); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(build, previous)); assertEquals(2, c.a.intValue()); assertEquals(5, c.b.intValue()); // reset to default - assertTrue(settingUpdater.prepareApply(Settings.EMPTY)); - settingUpdater.apply(); + assertTrue(settingUpdater.apply(Settings.EMPTY, build)); assertEquals(1, c.a.intValue()); assertEquals(1, c.b.intValue()); From 9e37322377315d7775d67a409e926009c1c9cba0 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 10 Dec 2015 18:02:51 +0100 Subject: [PATCH 023/322] add javadocs --- .../common/settings/AbstractScopedSettings.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index e3de252e083..ff57bcec2b5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -120,12 +120,14 @@ public abstract class AbstractScopedSettings extends AbstractComponent { *

* Note: Only settings registered in {@link SettingsModule} can be changed dynamically. *

+ * @param validator an additional validator that is only applied to updates of this setting. + * This is useful to add additional validation to settings at runtime compared to at startup time. */ - public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Consumer predicate) { + public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Consumer validator) { if (setting != get(setting.getKey())) { throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); } - this.settingUpdaters.add(setting.newUpdater(consumer, logger, predicate)); + this.settingUpdaters.add(setting.newUpdater(consumer, logger, validator)); } /** From 8c7e142eb0fe6bf94a795b024fb334f5802c7921 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 10 Dec 2015 18:05:51 +0100 Subject: [PATCH 024/322] add javadocs --- .../java/org/elasticsearch/common/settings/Setting.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index b164c906e7a..d1be23d3d9b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -45,6 +45,14 @@ public class Setting extends ToXContentToBytes { private final boolean dynamic; private final Scope scope; + /** + * Creates a new Setting instance + * @param key the settings key for this setting. + * @param defaultValue a default value function that returns the default values string representation. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true iff this setting can be dynamically updateable + * @param scope the scope of this setting + */ public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; From bc140659a01f7981dd4de930f2aa11ff5e378249 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 11 Dec 2015 11:03:44 -0500 Subject: [PATCH 025/322] fail build on wildcard imports Wildcard imports are terrible, they cause ambiguity in the code, make it not compile with the future versions of java in many cases. 
We should simply fail the build on this, it is messiness, caused by messy Intellij configuration --- .../elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy | 1 + 1 file changed, 1 insertion(+) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy index 5fa63956b57..67aa26c28ad 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy @@ -61,6 +61,7 @@ public class ForbiddenPatternsTask extends DefaultTask { // add mandatory rules patterns.put('nocommit', /nocommit/) patterns.put('tab', /\t/) + patterns.put('wildcard imports', /^\s*import.*\.\*/) } /** Adds a file glob pattern to be excluded */ From 783ba1908678cca3021eaf1e741bdfd3da950ee7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Dec 2015 11:44:49 -0500 Subject: [PATCH 026/322] Update contributing.md for forbidding import foo.* --- CONTRIBUTING.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 507a27a5912..2b06e17618e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -84,7 +84,9 @@ Please follow these formatting guidelines: * Line width is 140 characters * The rest is left to Java coding standards * Disable “auto-format on save†to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. -* Don't worry too much about imports. Try not to change the order but don't worry about fighting your IDE to stop it from switching from * imports to specific imports or from specific to * imports. +* Wildcard imports (`improt foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them. + * Eclipse: Preferences->Java->Code Style->Organize Imports. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value. +* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so. To create a distribution from the source, simply run: From 0f518e1b07059293b7ab94c5494ff86f531053a4 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 11 Dec 2015 13:23:33 -0500 Subject: [PATCH 027/322] fix typo --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2b06e17618e..070ea23d4e0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -84,7 +84,7 @@ Please follow these formatting guidelines: * Line width is 140 characters * The rest is left to Java coding standards * Disable “auto-format on save†to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. -* Wildcard imports (`improt foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them. +* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. 
Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them. * Eclipse: Preferences->Java->Code Style->Organize Imports. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value. * Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so. From 6b39ff608122caa66089f98aa30c89461378e7d3 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 23 Nov 2015 14:56:50 -0500 Subject: [PATCH 028/322] Only trace log shard not available exceptions This commit changes the behavior of the logging in TransportBroadcastByNodeAction#onNodeFailure to only trace log exceptions that are considered shard-not-available exceptions. This makes the logging consistent with how these exceptions are handled in the response. Relates #14927 --- .../node/TransportBroadcastByNodeAction.java | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index bc78f13433f..4799eed9154 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -19,8 +19,16 @@ package org.elasticsearch.action.support.broadcast.node; -import org.elasticsearch.action.*; -import org.elasticsearch.action.support.*; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; @@ -37,7 +45,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.NodeShouldNotConnectException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; @@ -384,7 +399,15 @@ public abstract class TransportBroadcastByNodeAction Date: Mon, 14 Dec 2015 17:20:22 -0500 Subject: [PATCH 029/322] Explicitly log cluster state update failures This commit adds explicit logging at 
the DEBUG level for cluster state update failures. Currently this responsibility is left to the cluster state task listener, but we should expliclty log these with a generic message to address cases where the listener might not. Relates #14899, relates #15016, relates #15023 --- .../service/InternalClusterService.java | 48 +++++++++++++++++-- 1 file changed, 43 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index d4b15861846..afdfea65328 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -20,8 +20,19 @@ package org.elasticsearch.cluster.service; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.Builder; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; @@ -42,7 +53,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.*; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.PrioritizedRunnable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryService; @@ -50,8 +67,19 @@ import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -292,6 +320,7 @@ public class InternalClusterService 
extends AbstractLifecycleComponent threadPool.generic().execute(() -> { if (updateTask.processed.getAndSet(true) == false) { + logger.debug("cluster state update task [{}] timed out after [{}]", source, config.timeout()); listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source)); }})); } else { @@ -413,6 +442,9 @@ public class InternalClusterService extends AbstractLifecycleComponent finalBatchResult = batchResult; + assert toExecute.stream().map(updateTask -> updateTask.task).allMatch(finalBatchResult.executionResults::containsKey); ClusterState newClusterState = batchResult.resultingState; final ArrayList> proccessedListeners = new ArrayList<>(); @@ -421,7 +453,13 @@ public class InternalClusterService extends AbstractLifecycleComponent proccessedListeners.add(updateTask), ex -> updateTask.listener.onFailure(updateTask.source, ex)); + executionResult.handle( + () -> proccessedListeners.add(updateTask), + ex -> { + logger.debug("cluster state update task [{}] failed", ex, updateTask.source); + updateTask.listener.onFailure(updateTask.source, ex); + } + ); } if (previousClusterState == newClusterState) { From 2e721a0328a1741a9080dc3eb625255a0cc320b1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 14 Dec 2015 18:54:35 -0500 Subject: [PATCH 030/322] Fix IntelliJ query builder type inference issues This commit addresses two type inference issues that the IntelliJ source editor struggles with when registering query builder prototypes in o/e/i/q/IndicesQueriesRegistry.java and o/e/i/q/f/ScoreFunctionParserMapper.java. --- .../query/functionscore/ScoreFunctionParserMapper.java | 9 ++++----- .../indices/query/IndicesQueriesRegistry.java | 8 +++++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java index c528c0007f2..e7ce9b90e2b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java @@ -19,13 +19,11 @@ package org.elasticsearch.index.query.functionscore; -import java.util.Map; - import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.XContentLocation; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; @@ -74,11 +72,12 @@ public class ScoreFunctionParserMapper { return functionParsers.get(parserName); } - private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { + private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { for (String name : scoreFunctionParser.getNames()) { map.put(name, scoreFunctionParser); } - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, scoreFunctionParser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable sfb = 
scoreFunctionParser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, sfb); } } diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index 0cec415d63b..08b7b34e91a 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -21,6 +21,7 @@ package org.elasticsearch.indices.query; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.EmptyQueryBuilder; @@ -40,11 +41,12 @@ public class IndicesQueriesRegistry extends AbstractComponent { public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers, NamedWriteableRegistry namedWriteableRegistry) { super(settings); Map> queryParsers = new HashMap<>(); - for (QueryParser queryParser : injectedQueryParsers) { + for (@SuppressWarnings("unchecked") QueryParser queryParser : injectedQueryParsers) { for (String name : queryParser.names()) { queryParsers.put(name, queryParser); } - namedWriteableRegistry.registerPrototype(QueryBuilder.class, queryParser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable qb = queryParser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, qb); } // EmptyQueryBuilder is not registered as query parser but used internally. // We need to register it with the NamedWriteableRegistry in order to serialize it @@ -58,4 +60,4 @@ public class IndicesQueriesRegistry extends AbstractComponent { public Map> queryParsers() { return queryParsers; } -} \ No newline at end of file +} From 2d42e99c7acd79817ac95beb92bbd2a509e5016d Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 15 Dec 2015 00:05:14 -0500 Subject: [PATCH 031/322] smoke test plugins does not test any plugins Currently the build has a bug and it loads 0 plugins. --- qa/smoke-test-plugins/build.gradle | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index 70611aed371..9d8e3950a83 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -22,15 +22,16 @@ import org.elasticsearch.gradle.MavenFilteringHack apply plugin: 'elasticsearch.rest-test' ext.pluginsCount = 0 -project.rootProject.subprojects.findAll { it.path.startsWith(':projects:') }.each { subproj -> +project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each { subproj -> integTest { cluster { // need to get a non-decorated project object, so must re-lookup the project by path plugin subproj.name, project(subproj.path) } } - pluginCount += 1 + pluginsCount += 1 } +assert pluginsCount > 0 ext.expansions = [ 'expected.plugins.count': pluginsCount From 60d35c81af8662c5771854e7e4fbc4a3dd3d78c1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 14 Dec 2015 15:33:08 -0800 Subject: [PATCH 032/322] Plugins: Expose http.type setting, and collapse al(most all) modules relating to transport/http This change adds back the http.type setting. 
It also cleans up all the transport related guice code to be consolidated within the NetworkModule (as transport and http related stuff is what and how ES exposes over the network). The setter methods previously used by some plugins to override eg the TransportService or HttpServerTransport are removed, and those plugins should now register a custom implementation of the class with a name and set that using the appropriate config setting. Note that I think ActionModule should also be moved into here, to sit along side the rest actions, but I left that for a followup. closes #14148 --- .../transport/ClientTransportModule.java | 37 -- .../client/transport/TransportClient.java | 11 +- .../common/network/NetworkModule.java | 347 +++++++++++++++++- .../elasticsearch/http/HttpServerModule.java | 59 --- .../java/org/elasticsearch/node/Node.java | 17 +- .../org/elasticsearch/rest/RestModule.java | 51 --- .../rest/action/RestActionModule.java | 273 -------------- .../transport/TransportModule.java | 122 ------ .../TransportClientHeadersTests.java | 8 +- .../common/inject/ModuleTestCase.java | 18 + .../common/network/NetworkModuleTests.java | 176 +++++++++ .../plugins/PluggableTransportModuleIT.java | 112 ------ .../TestResponseHeaderPlugin.java | 6 +- .../transport/TransportModuleTests.java | 48 +++ .../transport/netty/NettyTransportIT.java | 10 +- .../NettyTransportMultiPortIntegrationIT.java | 8 +- .../netty/NettyTransportPublishAddressIT.java | 7 +- .../messy/tests/IndicesRequestTests.java | 29 +- .../deletebyquery/DeleteByQueryPlugin.java | 10 +- .../test/ESBackcompatTestCase.java | 10 +- .../org/elasticsearch/test/ExternalNode.java | 6 +- .../transport/AssertingLocalTransport.java | 8 +- .../test/transport/MockTransportService.java | 8 +- 23 files changed, 648 insertions(+), 733 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/client/transport/ClientTransportModule.java delete mode 100644 core/src/main/java/org/elasticsearch/http/HttpServerModule.java delete mode 100644 core/src/main/java/org/elasticsearch/rest/RestModule.java delete mode 100644 core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java delete mode 100644 core/src/main/java/org/elasticsearch/transport/TransportModule.java create mode 100644 core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java delete mode 100644 core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java create mode 100644 core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java diff --git a/core/src/main/java/org/elasticsearch/client/transport/ClientTransportModule.java b/core/src/main/java/org/elasticsearch/client/transport/ClientTransportModule.java deleted file mode 100644 index 895b3d844f6..00000000000 --- a/core/src/main/java/org/elasticsearch/client/transport/ClientTransportModule.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.client.transport; - -import org.elasticsearch.client.support.Headers; -import org.elasticsearch.client.transport.support.TransportProxyClient; -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class ClientTransportModule extends AbstractModule { - - @Override - protected void configure() { - bind(Headers.class).asEagerSingleton(); - bind(TransportProxyClient.class).asEagerSingleton(); - bind(TransportClientNodesService.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 33cf3479419..3b8be668f43 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -32,7 +32,6 @@ import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Module; @@ -43,19 +42,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; @@ -69,7 +64,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; * The transport client allows to create a client that is not part of the cluster, but simply connects to one * or more nodes directly by adding their respective addresses using {@link #addTransportAddress(org.elasticsearch.common.transport.TransportAddress)}. *

- * The transport client important modules used is the {@link org.elasticsearch.transport.TransportModule} which is + * The transport client important modules used is the {@link org.elasticsearch.common.network.NetworkModule} which is * started in client mode (only connects, no bind). */ public class TransportClient extends AbstractClient { @@ -143,10 +138,9 @@ public class TransportClient extends AbstractClient { } modules.add(new PluginsModule(pluginsService)); modules.add(new SettingsModule(this.settings, settingsFilter )); - modules.add(new NetworkModule(networkService)); + modules.add(new NetworkModule(networkService, this.settings, true)); modules.add(new ClusterNameModule(this.settings)); modules.add(new ThreadPoolModule(threadPool)); - modules.add(new TransportModule(this.settings)); modules.add(new SearchModule() { @Override protected void configure() { @@ -154,7 +148,6 @@ public class TransportClient extends AbstractClient { } }); modules.add(new ActionModule(true)); - modules.add(new ClientTransportModule()); modules.add(new CircuitBreakerModule(this.settings)); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index c1f282ac234..f7eab3da2ac 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -19,21 +19,362 @@ package org.elasticsearch.common.network; +import org.elasticsearch.client.support.Headers; +import org.elasticsearch.client.transport.TransportClientNodesService; +import org.elasticsearch.client.transport.support.TransportProxyClient; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.ExtensionPoint; +import org.elasticsearch.http.HttpServer; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.netty.NettyHttpServerTransport; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction; +import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction; +import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction; +import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; +import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction; +import 
org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction; +import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; +import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction; +import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction; +import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction; +import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction; +import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; +import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction; +import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction; +import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction; +import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction; +import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction; +import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction; +import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction; +import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction; +import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction; +import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction; +import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction; +import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction; +import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction; +import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction; +import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction; +import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction; +import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction; +import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction; +import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction; +import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; +import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction; +import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; +import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction; 
+import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction; +import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction; +import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; +import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction; +import org.elasticsearch.rest.action.bulk.RestBulkAction; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestAliasAction; +import org.elasticsearch.rest.action.cat.RestAllocationAction; +import org.elasticsearch.rest.action.cat.RestCatAction; +import org.elasticsearch.rest.action.cat.RestFielddataAction; +import org.elasticsearch.rest.action.cat.RestHealthAction; +import org.elasticsearch.rest.action.cat.RestIndicesAction; +import org.elasticsearch.rest.action.cat.RestMasterAction; +import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; +import org.elasticsearch.rest.action.cat.RestNodesAction; +import org.elasticsearch.rest.action.cat.RestPluginsAction; +import org.elasticsearch.rest.action.cat.RestRepositoriesAction; +import org.elasticsearch.rest.action.cat.RestSegmentsAction; +import org.elasticsearch.rest.action.cat.RestShardsAction; +import org.elasticsearch.rest.action.cat.RestSnapshotAction; +import org.elasticsearch.rest.action.cat.RestThreadPoolAction; +import org.elasticsearch.rest.action.delete.RestDeleteAction; +import org.elasticsearch.rest.action.explain.RestExplainAction; +import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction; +import org.elasticsearch.rest.action.get.RestGetAction; +import org.elasticsearch.rest.action.get.RestGetSourceAction; +import org.elasticsearch.rest.action.get.RestHeadAction; +import org.elasticsearch.rest.action.get.RestMultiGetAction; +import org.elasticsearch.rest.action.index.RestIndexAction; +import org.elasticsearch.rest.action.main.RestMainAction; +import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; +import org.elasticsearch.rest.action.percolate.RestPercolateAction; +import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction; +import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction; +import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; +import org.elasticsearch.rest.action.search.RestClearScrollAction; +import org.elasticsearch.rest.action.search.RestMultiSearchAction; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.rest.action.search.RestSearchScrollAction; +import org.elasticsearch.rest.action.suggest.RestSuggestAction; +import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction; +import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction; +import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction; +import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; +import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; +import org.elasticsearch.rest.action.update.RestUpdateAction; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.local.LocalTransport; +import org.elasticsearch.transport.netty.NettyTransport; + +import java.util.Arrays; +import java.util.List; /** - * + * A module to handle registering and binding all network related classes. 
*/ public class NetworkModule extends AbstractModule { - private final NetworkService networkService; + public static final String TRANSPORT_TYPE_KEY = "transport.type"; + public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type"; - public NetworkModule(NetworkService networkService) { + public static final String LOCAL_TRANSPORT = "local"; + public static final String NETTY_TRANSPORT = "netty"; + + public static final String HTTP_TYPE_KEY = "http.type"; + public static final String HTTP_ENABLED = "http.enabled"; + + private static final List> builtinRestHandlers = Arrays.asList( + RestMainAction.class, + + RestNodesInfoAction.class, + RestNodesStatsAction.class, + RestNodesHotThreadsAction.class, + RestClusterStatsAction.class, + RestClusterStateAction.class, + RestClusterHealthAction.class, + RestClusterUpdateSettingsAction.class, + RestClusterGetSettingsAction.class, + RestClusterRerouteAction.class, + RestClusterSearchShardsAction.class, + RestPendingClusterTasksAction.class, + RestPutRepositoryAction.class, + RestGetRepositoriesAction.class, + RestDeleteRepositoryAction.class, + RestVerifyRepositoryAction.class, + RestGetSnapshotsAction.class, + RestCreateSnapshotAction.class, + RestRestoreSnapshotAction.class, + RestDeleteSnapshotAction.class, + RestSnapshotsStatusAction.class, + + RestIndicesExistsAction.class, + RestTypesExistsAction.class, + RestGetIndicesAction.class, + RestIndicesStatsAction.class, + RestIndicesSegmentsAction.class, + RestIndicesShardStoresAction.class, + RestGetAliasesAction.class, + RestAliasesExistAction.class, + RestIndexDeleteAliasesAction.class, + RestIndexPutAliasAction.class, + RestIndicesAliasesAction.class, + RestGetIndicesAliasesAction.class, + RestCreateIndexAction.class, + RestDeleteIndexAction.class, + RestCloseIndexAction.class, + RestOpenIndexAction.class, + + RestUpdateSettingsAction.class, + RestGetSettingsAction.class, + + RestAnalyzeAction.class, + RestGetIndexTemplateAction.class, + RestPutIndexTemplateAction.class, + RestDeleteIndexTemplateAction.class, + RestHeadIndexTemplateAction.class, + + RestPutWarmerAction.class, + RestDeleteWarmerAction.class, + RestGetWarmerAction.class, + + RestPutMappingAction.class, + RestGetMappingAction.class, + RestGetFieldMappingAction.class, + + RestRefreshAction.class, + RestFlushAction.class, + RestSyncedFlushAction.class, + RestForceMergeAction.class, + RestUpgradeAction.class, + RestClearIndicesCacheAction.class, + + RestIndexAction.class, + RestGetAction.class, + RestGetSourceAction.class, + RestHeadAction.class, + RestMultiGetAction.class, + RestDeleteAction.class, + org.elasticsearch.rest.action.count.RestCountAction.class, + RestSuggestAction.class, + RestTermVectorsAction.class, + RestMultiTermVectorsAction.class, + RestBulkAction.class, + RestUpdateAction.class, + RestPercolateAction.class, + RestMultiPercolateAction.class, + + RestSearchAction.class, + RestSearchScrollAction.class, + RestClearScrollAction.class, + RestMultiSearchAction.class, + RestRenderSearchTemplateAction.class, + + RestValidateQueryAction.class, + + RestExplainAction.class, + + RestRecoveryAction.class, + + // Templates API + RestGetSearchTemplateAction.class, + RestPutSearchTemplateAction.class, + RestDeleteSearchTemplateAction.class, + + // Scripts API + RestGetIndexedScriptAction.class, + RestPutIndexedScriptAction.class, + RestDeleteIndexedScriptAction.class, + + RestFieldStatsAction.class, + + // no abstract cat action + RestCatAction.class + ); + + private static final List> builtinCatHandlers = 
Arrays.asList( + RestAllocationAction.class, + RestShardsAction.class, + RestMasterAction.class, + RestNodesAction.class, + RestIndicesAction.class, + RestSegmentsAction.class, + // Fully qualified to prevent interference with rest.action.count.RestCountAction + org.elasticsearch.rest.action.cat.RestCountAction.class, + // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction + org.elasticsearch.rest.action.cat.RestRecoveryAction.class, + RestHealthAction.class, + org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class, + RestAliasAction.class, + RestThreadPoolAction.class, + RestPluginsAction.class, + RestFielddataAction.class, + RestNodeAttrsAction.class, + RestRepositoriesAction.class, + RestSnapshotAction.class + ); + + private final NetworkService networkService; + private final Settings settings; + private final boolean transportClient; + + private final ExtensionPoint.SelectedType transportServiceTypes = new ExtensionPoint.SelectedType<>("transport_service", TransportService.class); + private final ExtensionPoint.SelectedType transportTypes = new ExtensionPoint.SelectedType<>("transport", Transport.class); + private final ExtensionPoint.SelectedType httpTransportTypes = new ExtensionPoint.SelectedType<>("http_transport", HttpServerTransport.class); + private final ExtensionPoint.ClassSet restHandlers = new ExtensionPoint.ClassSet<>("rest_handler", RestHandler.class); + // we must separate the cat rest handlers so RestCatAction can collect them... + private final ExtensionPoint.ClassSet catHandlers = new ExtensionPoint.ClassSet<>("cat_handler", AbstractCatAction.class); + + /** + * Creates a network module that custom networking classes can be plugged into. + * + * @param networkService A constructed network service object to bind. + * @param settings The settings for the node + * @param transportClient True if only transport classes should be allowed to be registered, false otherwise. + */ + public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient) { this.networkService = networkService; + this.settings = settings; + this.transportClient = transportClient; + registerTransportService(NETTY_TRANSPORT, TransportService.class); + registerTransport(LOCAL_TRANSPORT, LocalTransport.class); + registerTransport(NETTY_TRANSPORT, NettyTransport.class); + + if (transportClient == false) { + registerHttpTransport(NETTY_TRANSPORT, NettyHttpServerTransport.class); + + for (Class catAction : builtinCatHandlers) { + catHandlers.registerExtension(catAction); + } + for (Class restAction : builtinRestHandlers) { + restHandlers.registerExtension(restAction); + } + } + } + + /** Adds a transport service implementation that can be selected by setting {@link #TRANSPORT_SERVICE_TYPE_KEY}. */ + public void registerTransportService(String name, Class clazz) { + transportServiceTypes.registerExtension(name, clazz); + } + + /** Adds a transport implementation that can be selected by setting {@link #TRANSPORT_TYPE_KEY}. */ + public void registerTransport(String name, Class clazz) { + transportTypes.registerExtension(name, clazz); + } + + /** Adds an http transport implementation that can be selected by setting {@link #HTTP_TYPE_KEY}. */ + // TODO: we need another name than "http transport"....so confusing with transportClient... 
+ public void registerHttpTransport(String name, Class clazz) { + if (transportClient) { + throw new IllegalArgumentException("Cannot register http transport " + clazz.getName() + " for transport client"); + } + httpTransportTypes.registerExtension(name, clazz); + } + + /** Adds an additional rest action. */ + // TODO: change this further to eliminate the middle man, ie RestController, and just register method and path here + public void registerRestHandler(Class clazz) { + if (transportClient) { + throw new IllegalArgumentException("Cannot register rest handler " + clazz.getName() + " for transport client"); + } + if (AbstractCatAction.class.isAssignableFrom(clazz)) { + catHandlers.registerExtension(clazz.asSubclass(AbstractCatAction.class)); + } else { + restHandlers.registerExtension(clazz); + } } @Override protected void configure() { bind(NetworkService.class).toInstance(networkService); + bind(NamedWriteableRegistry.class).asEagerSingleton(); + + transportServiceTypes.bindType(binder(), settings, TRANSPORT_SERVICE_TYPE_KEY, NETTY_TRANSPORT); + String defaultTransport = DiscoveryNode.localNode(settings) ? LOCAL_TRANSPORT : NETTY_TRANSPORT; + transportTypes.bindType(binder(), settings, TRANSPORT_TYPE_KEY, defaultTransport); + + if (transportClient) { + bind(Headers.class).asEagerSingleton(); + bind(TransportProxyClient.class).asEagerSingleton(); + bind(TransportClientNodesService.class).asEagerSingleton(); + } else { + if (settings.getAsBoolean(HTTP_ENABLED, true)) { + bind(HttpServer.class).asEagerSingleton(); + httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT); + } + bind(RestController.class).asEagerSingleton(); + catHandlers.bind(binder()); + restHandlers.bind(binder()); + } } } diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java b/core/src/main/java/org/elasticsearch/http/HttpServerModule.java deleted file mode 100644 index 49d67369643..00000000000 --- a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.http; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.netty.NettyHttpServerTransport; - -import java.util.Objects; - -/** - * - */ -public class HttpServerModule extends AbstractModule { - - private final Settings settings; - private final ESLogger logger; - - private Class httpServerTransportClass; - - public HttpServerModule(Settings settings) { - this.settings = settings; - this.logger = Loggers.getLogger(getClass(), settings); - this.httpServerTransportClass = NettyHttpServerTransport.class; - } - - @SuppressWarnings({"unchecked"}) - @Override - protected void configure() { - bind(HttpServerTransport.class).to(httpServerTransportClass).asEagerSingleton(); - bind(HttpServer.class).asEagerSingleton(); - } - - public void setHttpServerTransport(Class httpServerTransport, String source) { - Objects.requireNonNull(httpServerTransport, "Configured http server transport may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); - logger.info("Using [{}] as http transport, overridden by [{}]", httpServerTransportClass.getName(), source); - this.httpServerTransportClass = httpServerTransport; - } -} diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index d3f6367cac0..c05e1969c23 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -23,7 +23,6 @@ import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionModule; -import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClientModule; @@ -33,7 +32,6 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; @@ -62,7 +60,6 @@ import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.GatewayModule; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.http.HttpServer; -import org.elasticsearch.http.HttpServerModule; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; @@ -86,7 +83,6 @@ import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestModule; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; @@ -95,7 +91,6 @@ import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; -import 
org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.tribe.TribeModule; import org.elasticsearch.tribe.TribeService; @@ -108,7 +103,6 @@ import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.Charset; -import java.nio.file.CopyOption; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; @@ -185,20 +179,15 @@ public class Node implements Releasable { } modules.add(new PluginsModule(pluginsService)); modules.add(new SettingsModule(this.settings, settingsFilter)); - modules.add(new NodeModule(this, nodeSettingsService, monitorService)); - modules.add(new NetworkModule(networkService)); - modules.add(new ScriptModule(this.settings)); modules.add(new EnvironmentModule(environment)); + modules.add(new NodeModule(this, nodeSettingsService, monitorService)); + modules.add(new NetworkModule(networkService, settings, false)); + modules.add(new ScriptModule(this.settings)); modules.add(new NodeEnvironmentModule(nodeEnvironment)); modules.add(new ClusterNameModule(this.settings)); modules.add(new ThreadPoolModule(threadPool)); modules.add(new DiscoveryModule(this.settings)); modules.add(new ClusterModule(this.settings)); - modules.add(new RestModule(this.settings)); - modules.add(new TransportModule(settings)); - if (settings.getAsBoolean(HTTP_ENABLED, true)) { - modules.add(new HttpServerModule(settings)); - } modules.add(new IndicesModule()); modules.add(new SearchModule()); modules.add(new ActionModule(false)); diff --git a/core/src/main/java/org/elasticsearch/rest/RestModule.java b/core/src/main/java/org/elasticsearch/rest/RestModule.java deleted file mode 100644 index e7949172d0a..00000000000 --- a/core/src/main/java/org/elasticsearch/rest/RestModule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.action.RestActionModule; - -import java.util.ArrayList; -import java.util.List; - -/** - * - */ -public class RestModule extends AbstractModule { - - private final Settings settings; - private List> restPluginsActions = new ArrayList<>(); - - public void addRestAction(Class restAction) { - restPluginsActions.add(restAction); - } - - public RestModule(Settings settings) { - this.settings = settings; - } - - - @Override - protected void configure() { - bind(RestController.class).asEagerSingleton(); - new RestActionModule(restPluginsActions).configure(binder()); - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java b/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java deleted file mode 100644 index f0e4d10d7c4..00000000000 --- a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest.action; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction; -import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction; -import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction; -import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; -import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction; -import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction; -import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction; -import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; -import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction; -import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction; -import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction; -import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction; -import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; -import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction; -import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction; -import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction; -import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction; -import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction; -import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction; -import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction; -import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction; -import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction; -import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction; -import 
org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction; -import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction; -import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction; -import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction; -import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction; -import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction; -import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction; -import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction; -import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction; -import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; -import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction; -import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; -import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction; -import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction; -import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction; -import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; -import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction; -import org.elasticsearch.rest.action.bulk.RestBulkAction; -import org.elasticsearch.rest.action.cat.AbstractCatAction; -import org.elasticsearch.rest.action.cat.RestAliasAction; -import org.elasticsearch.rest.action.cat.RestAllocationAction; -import org.elasticsearch.rest.action.cat.RestCatAction; -import org.elasticsearch.rest.action.cat.RestFielddataAction; -import org.elasticsearch.rest.action.cat.RestHealthAction; -import org.elasticsearch.rest.action.cat.RestIndicesAction; -import org.elasticsearch.rest.action.cat.RestMasterAction; -import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; -import org.elasticsearch.rest.action.cat.RestNodesAction; -import org.elasticsearch.rest.action.cat.RestPluginsAction; -import org.elasticsearch.rest.action.cat.RestRepositoriesAction; -import org.elasticsearch.rest.action.cat.RestSegmentsAction; -import org.elasticsearch.rest.action.cat.RestShardsAction; -import org.elasticsearch.rest.action.cat.RestSnapshotAction; -import org.elasticsearch.rest.action.cat.RestThreadPoolAction; -import org.elasticsearch.rest.action.delete.RestDeleteAction; -import org.elasticsearch.rest.action.explain.RestExplainAction; -import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction; -import org.elasticsearch.rest.action.get.RestGetAction; -import org.elasticsearch.rest.action.get.RestGetSourceAction; -import org.elasticsearch.rest.action.get.RestHeadAction; -import org.elasticsearch.rest.action.get.RestMultiGetAction; -import org.elasticsearch.rest.action.index.RestIndexAction; -import org.elasticsearch.rest.action.main.RestMainAction; -import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; -import 
org.elasticsearch.rest.action.percolate.RestPercolateAction; -import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction; -import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction; -import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; -import org.elasticsearch.rest.action.search.RestClearScrollAction; -import org.elasticsearch.rest.action.search.RestMultiSearchAction; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.rest.action.search.RestSearchScrollAction; -import org.elasticsearch.rest.action.suggest.RestSuggestAction; -import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction; -import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction; -import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction; -import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; -import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; -import org.elasticsearch.rest.action.update.RestUpdateAction; - -import java.util.ArrayList; -import java.util.List; - -/** - * - */ -public class RestActionModule extends AbstractModule { - private List> restPluginsActions = new ArrayList<>(); - - public RestActionModule(List> restPluginsActions) { - this.restPluginsActions = restPluginsActions; - } - - @Override - protected void configure() { - for (Class restAction : restPluginsActions) { - bind(restAction).asEagerSingleton(); - } - - bind(RestMainAction.class).asEagerSingleton(); - - bind(RestNodesInfoAction.class).asEagerSingleton(); - bind(RestNodesStatsAction.class).asEagerSingleton(); - bind(RestNodesHotThreadsAction.class).asEagerSingleton(); - bind(RestClusterStatsAction.class).asEagerSingleton(); - bind(RestClusterStateAction.class).asEagerSingleton(); - bind(RestClusterHealthAction.class).asEagerSingleton(); - bind(RestClusterUpdateSettingsAction.class).asEagerSingleton(); - bind(RestClusterGetSettingsAction.class).asEagerSingleton(); - bind(RestClusterRerouteAction.class).asEagerSingleton(); - bind(RestClusterSearchShardsAction.class).asEagerSingleton(); - bind(RestPendingClusterTasksAction.class).asEagerSingleton(); - bind(RestPutRepositoryAction.class).asEagerSingleton(); - bind(RestGetRepositoriesAction.class).asEagerSingleton(); - bind(RestDeleteRepositoryAction.class).asEagerSingleton(); - bind(RestVerifyRepositoryAction.class).asEagerSingleton(); - bind(RestGetSnapshotsAction.class).asEagerSingleton(); - bind(RestCreateSnapshotAction.class).asEagerSingleton(); - bind(RestRestoreSnapshotAction.class).asEagerSingleton(); - bind(RestDeleteSnapshotAction.class).asEagerSingleton(); - bind(RestSnapshotsStatusAction.class).asEagerSingleton(); - - bind(RestIndicesExistsAction.class).asEagerSingleton(); - bind(RestTypesExistsAction.class).asEagerSingleton(); - bind(RestGetIndicesAction.class).asEagerSingleton(); - bind(RestIndicesStatsAction.class).asEagerSingleton(); - bind(RestIndicesSegmentsAction.class).asEagerSingleton(); - bind(RestIndicesShardStoresAction.class).asEagerSingleton(); - bind(RestGetAliasesAction.class).asEagerSingleton(); - bind(RestAliasesExistAction.class).asEagerSingleton(); - bind(RestIndexDeleteAliasesAction.class).asEagerSingleton(); - bind(RestIndexPutAliasAction.class).asEagerSingleton(); - bind(RestIndicesAliasesAction.class).asEagerSingleton(); - bind(RestGetIndicesAliasesAction.class).asEagerSingleton(); - bind(RestCreateIndexAction.class).asEagerSingleton(); - bind(RestDeleteIndexAction.class).asEagerSingleton(); - 
bind(RestCloseIndexAction.class).asEagerSingleton(); - bind(RestOpenIndexAction.class).asEagerSingleton(); - - bind(RestUpdateSettingsAction.class).asEagerSingleton(); - bind(RestGetSettingsAction.class).asEagerSingleton(); - - bind(RestAnalyzeAction.class).asEagerSingleton(); - bind(RestGetIndexTemplateAction.class).asEagerSingleton(); - bind(RestPutIndexTemplateAction.class).asEagerSingleton(); - bind(RestDeleteIndexTemplateAction.class).asEagerSingleton(); - bind(RestHeadIndexTemplateAction.class).asEagerSingleton(); - - bind(RestPutWarmerAction.class).asEagerSingleton(); - bind(RestDeleteWarmerAction.class).asEagerSingleton(); - bind(RestGetWarmerAction.class).asEagerSingleton(); - - bind(RestPutMappingAction.class).asEagerSingleton(); - bind(RestGetMappingAction.class).asEagerSingleton(); - bind(RestGetFieldMappingAction.class).asEagerSingleton(); - - bind(RestRefreshAction.class).asEagerSingleton(); - bind(RestFlushAction.class).asEagerSingleton(); - bind(RestSyncedFlushAction.class).asEagerSingleton(); - bind(RestForceMergeAction.class).asEagerSingleton(); - bind(RestUpgradeAction.class).asEagerSingleton(); - bind(RestClearIndicesCacheAction.class).asEagerSingleton(); - - bind(RestIndexAction.class).asEagerSingleton(); - bind(RestGetAction.class).asEagerSingleton(); - bind(RestGetSourceAction.class).asEagerSingleton(); - bind(RestHeadAction.class).asEagerSingleton(); - bind(RestMultiGetAction.class).asEagerSingleton(); - bind(RestDeleteAction.class).asEagerSingleton(); - bind(org.elasticsearch.rest.action.count.RestCountAction.class).asEagerSingleton(); - bind(RestSuggestAction.class).asEagerSingleton(); - bind(RestTermVectorsAction.class).asEagerSingleton(); - bind(RestMultiTermVectorsAction.class).asEagerSingleton(); - bind(RestBulkAction.class).asEagerSingleton(); - bind(RestUpdateAction.class).asEagerSingleton(); - bind(RestPercolateAction.class).asEagerSingleton(); - bind(RestMultiPercolateAction.class).asEagerSingleton(); - - bind(RestSearchAction.class).asEagerSingleton(); - bind(RestSearchScrollAction.class).asEagerSingleton(); - bind(RestClearScrollAction.class).asEagerSingleton(); - bind(RestMultiSearchAction.class).asEagerSingleton(); - bind(RestRenderSearchTemplateAction.class).asEagerSingleton(); - - bind(RestValidateQueryAction.class).asEagerSingleton(); - - bind(RestExplainAction.class).asEagerSingleton(); - - bind(RestRecoveryAction.class).asEagerSingleton(); - - // Templates API - bind(RestGetSearchTemplateAction.class).asEagerSingleton(); - bind(RestPutSearchTemplateAction.class).asEagerSingleton(); - bind(RestDeleteSearchTemplateAction.class).asEagerSingleton(); - - // Scripts API - bind(RestGetIndexedScriptAction.class).asEagerSingleton(); - bind(RestPutIndexedScriptAction.class).asEagerSingleton(); - bind(RestDeleteIndexedScriptAction.class).asEagerSingleton(); - - - bind(RestFieldStatsAction.class).asEagerSingleton(); - - // cat API - Multibinder catActionMultibinder = Multibinder.newSetBinder(binder(), AbstractCatAction.class); - catActionMultibinder.addBinding().to(RestAllocationAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestShardsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestMasterAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestNodesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestIndicesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestSegmentsAction.class).asEagerSingleton(); - // Fully qualified to prevent 
interference with rest.action.count.RestCountAction - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestCountAction.class).asEagerSingleton(); - // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestRecoveryAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestHealthAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestAliasAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestThreadPoolAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestPluginsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestFielddataAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestNodeAttrsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestRepositoriesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestSnapshotAction.class).asEagerSingleton(); - // no abstract cat action - bind(RestCatAction.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/transport/TransportModule.java b/core/src/main/java/org/elasticsearch/transport/TransportModule.java deleted file mode 100644 index abf90deee81..00000000000 --- a/core/src/main/java/org/elasticsearch/transport/TransportModule.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.transport; - -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.local.LocalTransport; -import org.elasticsearch.transport.netty.NettyTransport; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * - */ -public class TransportModule extends AbstractModule { - - public static final String TRANSPORT_TYPE_KEY = "transport.type"; - public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type"; - - public static final String LOCAL_TRANSPORT = "local"; - public static final String NETTY_TRANSPORT = "netty"; - - private final ESLogger logger; - private final Settings settings; - - private final Map> transportServices = new HashMap<>(); - private final Map> transports = new HashMap<>(); - private Class configuredTransportService; - private Class configuredTransport; - private String configuredTransportServiceSource; - private String configuredTransportSource; - - public TransportModule(Settings settings) { - this.settings = settings; - this.logger = Loggers.getLogger(getClass(), settings); - addTransport(LOCAL_TRANSPORT, LocalTransport.class); - addTransport(NETTY_TRANSPORT, NettyTransport.class); - } - - public void addTransportService(String name, Class clazz) { - Class oldClazz = transportServices.put(name, clazz); - if (oldClazz != null) { - throw new IllegalArgumentException("Cannot register TransportService [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName()); - } - } - - public void addTransport(String name, Class clazz) { - Class oldClazz = transports.put(name, clazz); - if (oldClazz != null) { - throw new IllegalArgumentException("Cannot register Transport [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName()); - } - } - - @Override - protected void configure() { - if (configuredTransportService != null) { - logger.info("Using [{}] as transport service, overridden by [{}]", configuredTransportService.getName(), configuredTransportServiceSource); - bind(TransportService.class).to(configuredTransportService).asEagerSingleton(); - } else { - String typeName = settings.get(TRANSPORT_SERVICE_TYPE_KEY); - if (typeName == null) { - bind(TransportService.class).asEagerSingleton(); - } else { - if (transportServices.containsKey(typeName) == false) { - throw new IllegalArgumentException("Unknown TransportService type [" + typeName + "], known types are: " + transportServices.keySet()); - } - bind(TransportService.class).to(transportServices.get(typeName)).asEagerSingleton(); - } - } - - bind(NamedWriteableRegistry.class).asEagerSingleton(); - if (configuredTransport != null) { - logger.info("Using [{}] as transport, overridden by [{}]", configuredTransport.getName(), configuredTransportSource); - bind(Transport.class).to(configuredTransport).asEagerSingleton(); - } else { - String defaultType = DiscoveryNode.localNode(settings) ? 
LOCAL_TRANSPORT : NETTY_TRANSPORT; - String typeName = settings.get(TRANSPORT_TYPE_KEY, defaultType); - Class clazz = transports.get(typeName); - if (clazz == null) { - throw new IllegalArgumentException("Unknown Transport [" + typeName + "]"); - } - bind(Transport.class).to(clazz).asEagerSingleton(); - } - } - - public void setTransportService(Class transportService, String source) { - Objects.requireNonNull(transportService, "Configured transport service may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport service may not be null"); - this.configuredTransportService = transportService; - this.configuredTransportServiceSource = source; - } - - public void setTransport(Class transport, String source) { - Objects.requireNonNull(transport, "Configured transport may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); - this.configuredTransport = transport; - this.configuredTransportSource = source; - } -} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index f4b29768b91..f452bb5c36c 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -40,7 +41,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; @@ -114,12 +114,12 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { public String description() { return "a mock transport service"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransportService("internal", InternalTransportService.class); + public void onModule(NetworkModule transportModule) { + transportModule.registerTransportService("internal", InternalTransportService.class); } @Override public Settings additionalSettings() { - return Settings.builder().put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build(); + return Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build(); } } diff --git a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java index 7901694bd4b..7d696b0cd81 100644 --- a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java @@ -60,6 +60,24 @@ public abstract class ModuleTestCase extends ESTestCase { fail("Did not find any binding to " + to.getName() + ". 
Found these bindings:\n" + s); } + /** Configures the module and asserts "clazz" is not bound to anything. */ + public void assertNotBound(Module module, Class clazz) { + List elements = Elements.getElements(module); + for (Element element : elements) { + if (element instanceof LinkedKeyBinding) { + LinkedKeyBinding binding = (LinkedKeyBinding) element; + if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { + fail("Found binding for " + clazz.getName() + " to " + binding.getKey().getTypeLiteral().getType().getTypeName()); + } + } else if (element instanceof UntargettedBinding) { + UntargettedBinding binding = (UntargettedBinding) element; + if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { + fail("Found binding for " + clazz.getName()); + } + } + } + } + /** * Attempts to configure the module, and asserts an {@link IllegalArgumentException} is * caught, containing the given messages diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java new file mode 100644 index 00000000000..798e82a979e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.network; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Table; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpServerAdapter; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.HttpStats; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestNodesAction; +import org.elasticsearch.rest.action.main.RestMainAction; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; + +public class NetworkModuleTests extends ModuleTestCase { + + static class FakeTransportService extends TransportService { + public FakeTransportService() { + super(null, null); + } + } + + static class FakeTransport extends AssertingLocalTransport { + public FakeTransport() { + super(null, null, null, null); + } + } + + static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { + public FakeHttpTransport() { + super(null); + } + @Override + protected void doStart() {} + @Override + protected void doStop() {} + @Override + protected void doClose() {} + @Override + public BoundTransportAddress boundAddress() { + return null; + } + @Override + public HttpInfo info() { + return null; + } + @Override + public HttpStats stats() { + return null; + } + @Override + public void httpServerAdapter(HttpServerAdapter httpServerAdapter) {} + } + + static class FakeRestHandler extends BaseRestHandler { + public FakeRestHandler() { + super(null, null, null); + } + @Override + protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {} + } + + static class FakeCatRestHandler extends AbstractCatAction { + public FakeCatRestHandler() { + super(null, null, null); + } + @Override + protected void doRequest(RestRequest request, RestChannel channel, Client client) {} + @Override + protected void documentation(StringBuilder sb) {} + @Override + protected Table getTableWithHeader(RestRequest request) { + return null; + } + } + + public void testRegisterTransportService() { + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerTransportService("custom", FakeTransportService.class); + assertBinding(module, TransportService.class, FakeTransportService.class); + + // check it works with transport only as well + module = new NetworkModule(new NetworkService(settings), settings, true); + module.registerTransportService("custom", FakeTransportService.class); + assertBinding(module, TransportService.class, FakeTransportService.class); + } + + public void testRegisterTransport() { + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerTransport("custom", 
FakeTransport.class); + assertBinding(module, Transport.class, FakeTransport.class); + + // check it works with transport only as well + module = new NetworkModule(new NetworkService(settings), settings, true); + module.registerTransport("custom", FakeTransport.class); + assertBinding(module, Transport.class, FakeTransport.class); + } + + public void testRegisterHttpTransport() { + Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerHttpTransport("custom", FakeHttpTransport.class); + assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); + + // check registration not allowed for transport only + module = new NetworkModule(new NetworkService(settings), settings, true); + try { + module.registerHttpTransport("custom", FakeHttpTransport.class); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Cannot register http transport")); + assertTrue(e.getMessage().contains("for transport client")); + } + + // not added if http is disabled + settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build(); + module = new NetworkModule(new NetworkService(settings), settings, false); + assertNotBound(module, HttpServerTransport.class); + } + + public void testRegisterRestHandler() { + Settings settings = Settings.EMPTY; + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerRestHandler(FakeRestHandler.class); + // also check a builtin is bound + assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class); + + // check registration not allowed for transport only + module = new NetworkModule(new NetworkService(settings), settings, true); + try { + module.registerRestHandler(FakeRestHandler.class); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Cannot register rest handler")); + assertTrue(e.getMessage().contains("for transport client")); + } + } + + public void testRegisterCatRestHandler() { + Settings settings = Settings.EMPTY; + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerRestHandler(FakeCatRestHandler.class); + // also check a builtin is bound + assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class); + } +} diff --git a/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java b/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java deleted file mode 100644 index 7831b7ca994..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugins; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.transport.AssertingLocalTransport; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestOptions; - -import java.io.IOException; -import java.util.Collection; -import java.util.concurrent.atomic.AtomicInteger; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -/** - * - */ -@ClusterScope(scope = Scope.SUITE, numDataNodes = 2) -public class PluggableTransportModuleIT extends ESIntegTestCase { - public static final AtomicInteger SENT_REQUEST_COUNTER = new AtomicInteger(0); - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "local") - .build(); - } - - @Override - protected Collection> nodePlugins() { - return pluginList(CountingSentRequestsPlugin.class); - } - - @Override - protected Collection> transportClientPlugins() { - return pluginList(CountingSentRequestsPlugin.class); - } - - public void testThatPluginFunctionalityIsLoadedWithoutConfiguration() throws Exception { - for (Transport transport : internalCluster().getInstances(Transport.class)) { - assertThat(transport, instanceOf(CountingAssertingLocalTransport.class)); - } - - int countBeforeRequest = SENT_REQUEST_COUNTER.get(); - internalCluster().clientNodeClient().admin().cluster().prepareHealth().get(); - int countAfterRequest = SENT_REQUEST_COUNTER.get(); - assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest))); - } - - public static class CountingSentRequestsPlugin extends Plugin { - @Override - public String name() { - return "counting-pipelines-plugin"; - } - - @Override - public String description() { - return "counting-pipelines-plugin"; - } - - public void onModule(TransportModule transportModule) { - transportModule.setTransport(CountingAssertingLocalTransport.class, this.name()); - } - } - - public static final class CountingAssertingLocalTransport extends AssertingLocalTransport { - - @Inject - public CountingAssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { - super(settings, threadPool, version, namedWriteableRegistry); - } - - @Override - public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { - SENT_REQUEST_COUNTER.incrementAndGet(); - 
super.sendRequest(node, requestId, action, request, options); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java index b9282cf05ad..a16f318140f 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java +++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugins.responseheader; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; public class TestResponseHeaderPlugin extends Plugin { @@ -34,7 +34,7 @@ public class TestResponseHeaderPlugin extends Plugin { return "test-plugin-custom-header-desc"; } - public void onModule(RestModule restModule) { - restModule.addRestAction(TestResponseHeaderRestAction.class); + public void onModule(NetworkModule module) { + module.registerRestHandler(TestResponseHeaderRestAction.class); } } diff --git a/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java new file mode 100644 index 00000000000..d587ab05e45 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.threadpool.ThreadPool; + +/** Unit tests for module registering custom transport and transport service */ +public class TransportModuleTests extends ModuleTestCase { + + + + static class FakeTransport extends AssertingLocalTransport { + @Inject + public FakeTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { + super(settings, threadPool, version, namedWriteableRegistry); + } + } + + static class FakeTransportService extends TransportService { + @Inject + public FakeTransportService(Settings settings, Transport transport, ThreadPool threadPool) { + super(settings, transport, threadPool); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index c84a9eb9a55..78caef498d1 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -21,13 +21,14 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -40,7 +41,6 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RequestHandlerRegistry; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelPipeline; @@ -66,7 +66,7 @@ public class NettyTransportIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) .put("node.mode", "network") - .put(TransportModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); + .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); } @Override @@ -99,8 +99,8 @@ public class NettyTransportIT extends ESIntegTestCase { public String description() { return "an exception throwing transport for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransport("exception-throwing", ExceptionThrowingNettyTransport.class); + public void onModule(NetworkModule module) { + module.registerTransport("exception-throwing", 
ExceptionThrowingNettyTransport.class); } } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java index 59ef26c42af..ee49012291d 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java @@ -19,11 +19,12 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -31,7 +32,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.junit.annotations.Network; -import org.elasticsearch.transport.TransportModule; import java.net.InetAddress; import java.util.Locale; @@ -60,7 +60,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { Settings.Builder builder = settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("network.host", "127.0.0.1") - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("node.mode", "network") .put("transport.profiles.client1.port", randomPortRange) .put("transport.profiles.client1.publish_host", "127.0.0.7") @@ -72,7 +72,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { public void testThatTransportClientCanConnect() throws Exception { Settings settings = settingsBuilder() .put("cluster.name", internalCluster().getClusterName()) - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("path.home", createTempDir().toString()) .build(); try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) { diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java index 3437701f6c9..ea67ce32717 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java @@ -21,22 +21,19 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import 
org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.transport.TransportModule; import java.net.Inet4Address; -import java.net.Inet6Address; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.not; /** * Checks that Elasticsearch produces a sane publish_address when it binds to @@ -48,7 +45,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("node.mode", "network").build(); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 66a764dd75a..516514599ae 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -85,6 +85,7 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; @@ -95,16 +96,32 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; @ClusterScope(scope = Scope.SUITE, numClientNodes = 1, minNumDataNodes = 2) public class IndicesRequestTests extends ESIntegTestCase { @@ -127,7 +144,7 @@ public class IndicesRequestTests extends ESIntegTestCase { protected Settings nodeSettings(int ordinal) { // must set this independently of the plugin so it overrides MockTransportService return Settings.builder().put(super.nodeSettings(ordinal)) - .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "intercepting").build(); + .put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, 
"intercepting").build(); } @Override @@ -756,8 +773,8 @@ public class IndicesRequestTests extends ESIntegTestCase { public String description() { return "an intercepting transport service for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransportService("intercepting", InterceptingTransportService.class); + public void onModule(NetworkModule module) { + module.registerTransportService("intercepting", InterceptingTransportService.class); } } diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java index b189745408f..8395223f669 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java @@ -22,14 +22,10 @@ package org.elasticsearch.plugin.deletebyquery; import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.deletebyquery.DeleteByQueryAction; import org.elasticsearch.action.deletebyquery.TransportDeleteByQueryAction; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; import org.elasticsearch.rest.action.deletebyquery.RestDeleteByQueryAction; -import java.util.Collection; -import java.util.Collections; - public class DeleteByQueryPlugin extends Plugin { public static final String NAME = "delete-by-query"; @@ -48,8 +44,8 @@ public class DeleteByQueryPlugin extends Plugin { actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); } - public void onModule(RestModule restModule) { - restModule.addRestAction(RestDeleteByQueryAction.class); + public void onModule(NetworkModule module) { + module.registerRestHandler(RestDeleteByQueryAction.class); } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 3e5c903a1ba..ade424599fe 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -26,14 +26,11 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.junit.listeners.LoggingListener; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportModule; import java.io.IOException; import java.lang.annotation.ElementType; @@ -43,10 +40,7 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collection; -import java.util.Collections; import java.util.Map; -import java.util.Random; import static org.hamcrest.Matchers.is; @@ -238,7 +232,7 @@ public abstract class ESBackcompatTestCase 
extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); - builder.put(TransportModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external builder.put("node.mode", "network"); return builder.build(); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 6ab39a5b139..05f194fc26a 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -28,11 +28,11 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.transport.TransportModule; import java.io.Closeable; import java.io.IOException; @@ -111,9 +111,9 @@ final class ExternalNode implements Closeable { case "path.home": case "node.mode": case "node.local": - case TransportModule.TRANSPORT_TYPE_KEY: + case NetworkModule.TRANSPORT_TYPE_KEY: case DiscoveryModule.DISCOVERY_TYPE_KEY: - case TransportModule.TRANSPORT_SERVICE_TYPE_KEY: + case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: case InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING: continue; default: diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 64cc401cb5f..8b395003576 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -30,7 +31,6 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; @@ -51,12 +51,12 @@ public class AssertingLocalTransport extends LocalTransport { public String description() { return "an asserting transport for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransport("mock", AssertingLocalTransport.class); + public void onModule(NetworkModule module) { + module.registerTransport("mock", AssertingLocalTransport.class); } @Override public Settings additionalSettings() { - return 
Settings.builder().put(TransportModule.TRANSPORT_TYPE_KEY, "mock").build(); + return Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "mock").build(); } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index e1efd6c3745..d636341e42f 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -38,7 +39,6 @@ import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RequestHandlerRegistry; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; @@ -75,12 +75,12 @@ public class MockTransportService extends TransportService { public String description() { return "a mock transport service for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransportService("mock", MockTransportService.class); + public void onModule(NetworkModule module) { + module.registerTransportService("mock", MockTransportService.class); } @Override public Settings additionalSettings() { - return Settings.builder().put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "mock").build(); + return Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "mock").build(); } } From ec908ddfd652cc66639653d98021b6cce9894f38 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 10 Dec 2015 12:20:19 +0100 Subject: [PATCH 033/322] Use transport service to handle RetryOnReplicaException and execute replica action on the current node. Transport service will delegate to threadpool internally. 
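For illustration, a minimal sketch of the new retry path (a hedged summary of the diff below; it assumes the surrounding TransportReplicationAction fields transportService, clusterService, logger and transportReplicaAction, and retryReplicaLocally is a hypothetical method name, not part of this patch):

    // Hedged sketch: on RetryOnReplicaException the replica operation is re-sent
    // to the local node through the transport service instead of being forked
    // onto a thread pool directly; the transport service picks the executor.
    void retryReplicaLocally(ReplicaRequest request, TransportChannel channel) {
        String extraMessage = "action [" + transportReplicaAction + "], request[" + request + "]";
        TransportChannelResponseHandler<TransportResponse.Empty> handler =
                TransportChannelResponseHandler.emptyResponseHandler(logger, channel, extraMessage);
        transportService.sendRequest(clusterService.localNode(), transportReplicaAction, request, handler);
    }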
--- .../TransportReplicationAction.java | 8 +- .../TransportChannelResponseHandler.java | 76 +++++++++++++++++++ 2 files changed, 82 insertions(+), 2 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 26c439c0a3d..d17cc02c5b0 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -300,11 +300,15 @@ public abstract class TransportReplicationAction handler = TransportChannelResponseHandler.emptyResponseHandler(logger, channel, extraMessage); + transportService.sendRequest(clusterService.localNode(), transportReplicaAction, request, handler); } @Override diff --git a/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java new file mode 100644 index 00000000000..8c042cd1937 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.IOException; + +/** + * Base class for delegating transport response to a transport channel + */ +public abstract class TransportChannelResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> { + + /** + * Convenience method for delegating an empty response to the provided channel + */ + public static TransportChannelResponseHandler<TransportResponse.Empty> emptyResponseHandler(ESLogger logger, TransportChannel channel, String extraInfoOnError) { + return new TransportChannelResponseHandler<TransportResponse.Empty>(logger, channel, extraInfoOnError) { + @Override + public TransportResponse.Empty newInstance() { + return TransportResponse.Empty.INSTANCE; + } + }; + } + + private final ESLogger logger; + private final TransportChannel channel; + private final String extraInfoOnError; + + protected TransportChannelResponseHandler(ESLogger logger, TransportChannel channel, String extraInfoOnError) { + this.logger = logger; + this.channel = channel; + this.extraInfoOnError = extraInfoOnError; + } + + @Override + public void handleResponse(T response) { + try { + channel.sendResponse(response); + } catch (IOException e) { + handleException(new TransportException(e)); + } + } + + @Override + public void handleException(TransportException exp) { + try { + channel.sendResponse(exp); + } catch (IOException e) { + logger.debug("failed to send failure {}", e, extraInfoOnError == null ? "" : "(" + extraInfoOnError + ")"); + } + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } +} From 50eeafa75c122096377f37bd9553e7ac9e4d45b5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 8 Dec 2015 15:14:50 +0100 Subject: [PATCH 034/322] Make mappings immutable. Today mappings are mutable because of two APIs: - Mapper.merge, which expects changes to be performed in-place - IncludeInAll, which allows changing whether values should be put in the `_all` field in place. This commit changes both APIs to return a modified copy instead of modifying in place so that mappings can be immutable. For now, only the type-level object is immutable, but in the future we can imagine making them immutable at the index-level so that mapping updates could be completely atomic at the index level.
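As an illustration of the new copy-on-write contract, a hedged sketch (mergeAndPublish is a hypothetical helper, not part of this patch; it only uses the Mapping.merge signature introduced below):

    // Hedged sketch: Mapping.merge returns a new, merged instance and throws
    // on conflict; both `current` and `incoming` are left unmodified, so a
    // failed merge leaves the live mapping intact.
    static Mapping mergeAndPublish(Mapping current, Mapping incoming, boolean updateAllTypes) {
        Mapping merged = current.merge(incoming, updateAllTypes); // may throw IllegalArgumentException
        return merged; // the caller swaps this in atomically; `current` is untouched
    }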
Close #9365 --- .../metadata/MetaDataMappingService.java | 8 +- .../index/mapper/DocumentMapper.java | 16 ++- .../index/mapper/DocumentParser.java | 4 +- .../index/mapper/FieldMapper.java | 112 ++++++++------- .../elasticsearch/index/mapper/Mapper.java | 4 +- .../index/mapper/MapperService.java | 22 +-- .../index/mapper/MapperUtils.java | 46 ------ .../elasticsearch/index/mapper/Mapping.java | 31 ++-- .../index/mapper/MergeResult.java | 81 ----------- .../index/mapper/MetadataFieldMapper.java | 4 + .../index/mapper/ParseContext.java | 2 +- .../index/mapper/ParsedDocument.java | 2 +- .../index/mapper/core/ByteFieldMapper.java | 3 +- .../mapper/core/CompletionFieldMapper.java | 8 +- .../index/mapper/core/DateFieldMapper.java | 3 +- .../index/mapper/core/DoubleFieldMapper.java | 3 +- .../index/mapper/core/FloatFieldMapper.java | 3 +- .../index/mapper/core/IntegerFieldMapper.java | 3 +- .../index/mapper/core/LongFieldMapper.java | 3 +- .../index/mapper/core/NumberFieldMapper.java | 52 ++++--- .../index/mapper/core/ShortFieldMapper.java | 3 +- .../index/mapper/core/StringFieldMapper.java | 48 ++++--- .../mapper/core/TokenCountFieldMapper.java | 15 +- .../mapper/geo/BaseGeoPointFieldMapper.java | 15 +- .../mapper/geo/GeoPointFieldMapperLegacy.java | 16 +-- .../index/mapper/geo/GeoShapeFieldMapper.java | 14 +- .../index/mapper/internal/AllFieldMapper.java | 26 +++- .../index/mapper/internal/IdFieldMapper.java | 3 +- .../mapper/internal/IndexFieldMapper.java | 9 +- .../mapper/internal/ParentFieldMapper.java | 15 +- .../mapper/internal/RoutingFieldMapper.java | 3 +- .../mapper/internal/SourceFieldMapper.java | 26 ++-- .../index/mapper/internal/TTLFieldMapper.java | 15 +- .../mapper/internal/TimestampFieldMapper.java | 49 +++---- .../mapper/internal/TypeFieldMapper.java | 3 +- .../index/mapper/internal/UidFieldMapper.java | 3 +- .../mapper/internal/VersionFieldMapper.java | 3 +- .../index/mapper/ip/IpFieldMapper.java | 3 +- .../index/mapper/object/ObjectMapper.java | 132 ++++++++---------- .../index/mapper/object/RootObjectMapper.java | 42 ++++-- .../shard/TranslogRecoveryPerformer.java | 2 +- .../mapper/copyto/CopyToMapperTests.java | 6 +- .../core/TokenCountFieldMapperTests.java | 7 +- .../mapper/date/SimpleDateMappingTests.java | 3 +- .../mapper/externalvalues/ExternalMapper.java | 3 +- .../ExternalMetadataMapper.java | 5 +- .../mapper/geo/GeoPointFieldMapperTests.java | 1 - .../mapper/geo/GeoShapeFieldMapperTests.java | 4 - .../mapper/merge/TestMergeMapperTests.java | 37 +++-- .../merge/JavaMultiFieldMergeTests.java | 13 +- .../source/DefaultSourceMappingTests.java | 21 +-- .../string/SimpleStringMappingTests.java | 4 +- .../timestamp/TimestampMappingTests.java | 38 +++-- .../index/mapper/ttl/TTLMappingTests.java | 42 +++--- .../mapper/update/UpdateMappingTests.java | 14 +- .../search/child/ChildQuerySearchIT.java | 2 +- .../mapper/attachments/AttachmentMapper.java | 2 +- .../mapper/murmur3/Murmur3FieldMapper.java | 3 +- .../index/mapper/size/SizeFieldMapper.java | 9 +- 59 files changed, 446 insertions(+), 623 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 957125703b6..bbaeb5a11d7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ 
-37,7 +37,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.percolator.PercolatorService; @@ -251,11 +250,8 @@ public class MetaDataMappingService extends AbstractComponent { newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); if (existingMapper != null) { // first, simulate - MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); - // if we have conflicts, throw an exception - if (mergeResult.hasConflicts()) { - throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(mergeResult.buildConflicts()) + "}"); - } + // this will just throw exceptions in case of problems + existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); } else { // TODO: can we find a better place for this validation? // The reason this validation is here is that the mapper service doesn't learn about diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index c4fec8cf095..24374806717 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -52,6 +52,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -117,7 +118,7 @@ public class DocumentMapper implements ToXContent { private volatile CompressedXContent mappingSource; - private final Mapping mapping; + private volatile Mapping mapping; private final DocumentParser documentParser; @@ -352,16 +353,19 @@ public class DocumentMapper implements ToXContent { mapperService.addMappers(type, objectMappers, fieldMappers); } - public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) { + public void merge(Mapping mapping, boolean simulate, boolean updateAllTypes) { try (ReleasableLock lock = mappingWriteLock.acquire()) { mapperService.checkMappersCompatibility(type, mapping, updateAllTypes); - final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes); - this.mapping.merge(mapping, mergeResult); + // do the merge even if simulate == false so that we get exceptions + Mapping merged = this.mapping.merge(mapping, updateAllTypes); if (simulate == false) { - addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers(), updateAllTypes); + this.mapping = merged; + Collection objectMappers = new ArrayList<>(); + Collection fieldMappers = new ArrayList<>(Arrays.asList(merged.metadataMappers)); + MapperUtils.collect(merged.root, objectMappers, fieldMappers); + addMappers(objectMappers, fieldMappers, updateAllTypes); refreshSource(); } - return mergeResult; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index b0ad972d575..656ee2c600d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -267,7 +267,7 @@ class DocumentParser implements Closeable { if (update == null) { update = newUpdate; } else { - MapperUtils.merge(update, newUpdate); + update = update.merge(newUpdate, false); } } } @@ -759,7 +759,7 @@ class DocumentParser implements Closeable { private static M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException { final Mapper update = parseObjectOrField(context, mapper); if (update != null) { - MapperUtils.merge(mapper, update); + mapper = (M) mapper.merge(update, false); } return mapper; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index ced3f08b229..9997f8608d2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -47,7 +47,7 @@ import java.util.List; import java.util.Locale; import java.util.stream.StreamSupport; -public abstract class FieldMapper extends Mapper { +public abstract class FieldMapper extends Mapper implements Cloneable { public abstract static class Builder extends Mapper.Builder { @@ -84,8 +84,13 @@ public abstract class FieldMapper extends Mapper { * if the fieldType has a non-null option we are all good it might have been set through a different * call. */ - final IndexOptions options = getDefaultIndexOption(); - assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing"; + IndexOptions options = getDefaultIndexOption(); + if (options == IndexOptions.NONE) { + // can happen when an existing type on the same index has disabled indexing + // since we inherit the default field type from the first mapper that is + // created on an index + throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types of the same index"); + } fieldType.setIndexOptions(options); } } else { @@ -270,7 +275,7 @@ public abstract class FieldMapper extends Mapper { protected MappedFieldTypeReference fieldTypeRef; protected final MappedFieldType defaultFieldType; - protected final MultiFields multiFields; + protected MultiFields multiFields; protected CopyTo copyTo; protected final boolean indexCreatedBefore2x; @@ -359,26 +364,41 @@ public abstract class FieldMapper extends Mapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected FieldMapper clone() { + try { + return (FieldMapper) super.clone(); + } catch (CloneNotSupportedException e) { + throw new AssertionError(e); + } + } + + @Override + public FieldMapper merge(Mapper mergeWith, boolean updateAllTypes) { + FieldMapper merged = clone(); + merged.doMerge(mergeWith, updateAllTypes); + return merged; + } + + /** + * Merge changes coming from {@code mergeWith} in place. 
+ * @param updateAllTypes TODO + */ + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { if (!this.getClass().equals(mergeWith.getClass())) { String mergedType = mergeWith.getClass().getSimpleName(); if (mergeWith instanceof FieldMapper) { mergedType = ((FieldMapper) mergeWith).contentType(); } - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); - // different types, return - return; + throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); } FieldMapper fieldMergeWith = (FieldMapper) mergeWith; - multiFields.merge(mergeWith, mergeResult); + multiFields = multiFields.merge(fieldMergeWith.multiFields); - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - // apply changeable values - MappedFieldType fieldType = fieldMergeWith.fieldType().clone(); - fieldType.freeze(); - fieldTypeRef.set(fieldType); - this.copyTo = fieldMergeWith.copyTo; - } + // apply changeable values + MappedFieldType fieldType = fieldMergeWith.fieldType().clone(); + fieldType.freeze(); + fieldTypeRef.set(fieldType); + this.copyTo = fieldMergeWith.copyTo; } @Override @@ -565,18 +585,20 @@ public abstract class FieldMapper extends Mapper { } private final ContentPath.Type pathType; - private volatile ImmutableOpenMap mappers; + private final ImmutableOpenMap mappers; - public MultiFields(ContentPath.Type pathType, ImmutableOpenMap mappers) { + private MultiFields(ContentPath.Type pathType, ImmutableOpenMap mappers) { this.pathType = pathType; - this.mappers = mappers; + ImmutableOpenMap.Builder builder = new ImmutableOpenMap.Builder<>(); // we disable the all in multi-field mappers - for (ObjectCursor cursor : mappers.values()) { + for (ObjectObjectCursor cursor : mappers) { FieldMapper mapper = cursor.value; if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); + mapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); } + builder.put(cursor.key, mapper); } + this.mappers = builder.build(); } public void parse(FieldMapper mainField, ParseContext context) throws IOException { @@ -598,47 +620,29 @@ public abstract class FieldMapper extends Mapper { context.path().pathType(origPathType); } - // No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge - public void merge(Mapper mergeWith, MergeResult mergeResult) { - FieldMapper mergeWithMultiField = (FieldMapper) mergeWith; + public MultiFields merge(MultiFields mergeWith) { + if (pathType != mergeWith.pathType) { + throw new IllegalArgumentException("Can't change path type from [" + pathType + "] to [" + mergeWith.pathType + "]"); + } + ImmutableOpenMap.Builder newMappersBuilder = ImmutableOpenMap.builder(mappers); - List newFieldMappers = null; - ImmutableOpenMap.Builder newMappersBuilder = null; - - for (ObjectCursor cursor : mergeWithMultiField.multiFields.mappers.values()) { + for (ObjectCursor cursor : mergeWith.mappers.values()) { FieldMapper mergeWithMapper = cursor.value; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); + FieldMapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); if (mergeIntoMapper == null) { - // no mapping, simply add it if not simulating - if (!mergeResult.simulate()) { - // we disable the all in 
multi-field mappers - if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll(); - } - if (newMappersBuilder == null) { - newMappersBuilder = ImmutableOpenMap.builder(mappers); - } - newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper); - if (mergeWithMapper instanceof FieldMapper) { - if (newFieldMappers == null) { - newFieldMappers = new ArrayList<>(2); - } - newFieldMappers.add(mergeWithMapper); - } + // we disable the all in multi-field mappers + if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) { + mergeWithMapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll(); } + newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper); } else { - mergeIntoMapper.merge(mergeWithMapper, mergeResult); + FieldMapper merged = mergeIntoMapper.merge(mergeWithMapper, false); + newMappersBuilder.put(merged.simpleName(), merged); // override previous definition } } - // first add all field mappers - if (newFieldMappers != null) { - mergeResult.addFieldMappers(newFieldMappers); - } - // now publish mappers - if (newMappersBuilder != null) { - mappers = newMappersBuilder.build(); - } + ImmutableOpenMap mappers = newMappersBuilder.build(); + return new MultiFields(pathType, mappers); } public Iterator iterator() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 33a4dabd3be..4c3aa3c56bb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -174,5 +174,7 @@ public abstract class Mapper implements ToXContent, Iterable { /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */ public abstract String name(); - public abstract void merge(Mapper mergeWith, MergeResult mergeResult); + /** Return the merge of {@code mergeWith} into this. + * Both {@code this} and {@code mergeWith} will be left unmodified. 
*/ + public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 938f610d6db..1d2961c482a 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -251,14 +251,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { DocumentMapper oldMapper = mappers.get(mapper.type()); if (oldMapper != null) { - // simulate first - MergeResult result = oldMapper.merge(mapper.mapping(), true, updateAllTypes); - if (result.hasConflicts()) { - throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(result.buildConflicts()) + "}"); - } - // then apply for real - result = oldMapper.merge(mapper.mapping(), false, updateAllTypes); - assert result.hasConflicts() == false; // we already simulated + oldMapper.merge(mapper.mapping(), false, updateAllTypes); return oldMapper; } else { Tuple, Collection> newMappers = checkMappersCompatibility( @@ -305,12 +298,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable { for (ObjectMapper newObjectMapper : objectMappers) { ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); if (existingObjectMapper != null) { - MergeResult result = new MergeResult(true, updateAllTypes); - existingObjectMapper.merge(newObjectMapper, result); - if (result.hasConflicts()) { - throw new IllegalArgumentException("Mapper for [" + newObjectMapper.fullPath() + "] conflicts with existing mapping in other types" + - Arrays.toString(result.buildConflicts())); - } + // simulate a merge and ignore the result, we are just interested + // in exceptions here + existingObjectMapper.merge(newObjectMapper, updateAllTypes); } } fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes); @@ -320,9 +310,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { String type, Mapping mapping, boolean updateAllTypes) { List objectMappers = new ArrayList<>(); List fieldMappers = new ArrayList<>(); - for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { - fieldMappers.add(metadataMapper); - } + Collections.addAll(fieldMappers, mapping.metadataMappers); MapperUtils.collect(mapping.root, objectMappers, fieldMappers); checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes); return new Tuple<>(objectMappers, fieldMappers); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java index d46c32a932b..04508827f77 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java @@ -27,52 +27,6 @@ import java.util.Collection; public enum MapperUtils { ; - private static MergeResult newStrictMergeResult() { - return new MergeResult(false, false) { - - @Override - public void addFieldMappers(Collection fieldMappers) { - // no-op - } - - @Override - public void addObjectMappers(Collection objectMappers) { - // no-op - } - - @Override - public Collection getNewFieldMappers() { - throw new UnsupportedOperationException("Strict merge result does not support new field mappers"); - } - - @Override - public Collection getNewObjectMappers() { - throw new UnsupportedOperationException("Strict merge 
result does not support new object mappers"); - } - - @Override - public void addConflict(String mergeFailure) { - throw new MapperParsingException("Merging dynamic updates triggered a conflict: " + mergeFailure); - } - }; - } - - /** - * Merge {@code mergeWith} into {@code mergeTo}. Note: this method only - * merges mappings, not lookup structures. Conflicts are returned as exceptions. - */ - public static void merge(Mapper mergeInto, Mapper mergeWith) { - mergeInto.merge(mergeWith, newStrictMergeResult()); - } - - /** - * Merge {@code mergeWith} into {@code mergeTo}. Note: this method only - * merges mappings, not lookup structures. Conflicts are returned as exceptions. - */ - public static void merge(Mapping mergeInto, Mapping mergeWith) { - mergeInto.merge(mergeWith, newStrictMergeResult()); - } - /** Split mapper and its descendants into object and field mappers. */ public static void collect(Mapper mapper, Collection objectMappers, Collection fieldMappers) { if (mapper instanceof RootObjectMapper) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java index bac42162552..a16024211bf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -47,19 +47,19 @@ public final class Mapping implements ToXContent { final RootObjectMapper root; final MetadataFieldMapper[] metadataMappers; final Map, MetadataFieldMapper> metadataMappersMap; - volatile Map meta; + final Map meta; public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map meta) { this.indexCreated = indexCreated; - this.root = rootObjectMapper; this.metadataMappers = metadataMappers; Map, MetadataFieldMapper> metadataMappersMap = new HashMap<>(); for (MetadataFieldMapper metadataMapper : metadataMappers) { if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) { - root.putMapper(metadataMapper); + rootObjectMapper = rootObjectMapper.copyAndPutMapper(metadataMapper); } metadataMappersMap.put(metadataMapper.getClass(), metadataMapper); } + this.root = rootObjectMapper; // keep root mappers sorted for consistent serialization Arrays.sort(metadataMappers, new Comparator() { @Override @@ -90,21 +90,20 @@ public final class Mapping implements ToXContent { } /** @see DocumentMapper#merge(Mapping, boolean, boolean) */ - public void merge(Mapping mergeWith, MergeResult mergeResult) { - assert metadataMappers.length == mergeWith.metadataMappers.length; - - root.merge(mergeWith.root, mergeResult); - for (MetadataFieldMapper metadataMapper : metadataMappers) { - MetadataFieldMapper mergeWithMetadataMapper = mergeWith.metadataMapper(metadataMapper.getClass()); - if (mergeWithMetadataMapper != null) { - metadataMapper.merge(mergeWithMetadataMapper, mergeResult); + public Mapping merge(Mapping mergeWith, boolean updateAllTypes) { + RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes); + Map, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap); + for (MetadataFieldMapper metaMergeWith : mergeWith.metadataMappers) { + MetadataFieldMapper mergeInto = mergedMetaDataMappers.get(metaMergeWith.getClass()); + MetadataFieldMapper merged; + if (mergeInto == null) { + merged = metaMergeWith; + } else { + merged = mergeInto.merge(metaMergeWith, updateAllTypes); } + mergedMetaDataMappers.put(merged.getClass(), merged); } - - if 
(mergeResult.simulate() == false) { - // let the merge with attributes to override the attributes - meta = mergeWith.meta; - } + return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java b/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java deleted file mode 100644 index f5698a0ed18..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.index.mapper.object.ObjectMapper; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -/** A container for tracking results of a mapping merge. */ -public class MergeResult { - - private final boolean simulate; - private final boolean updateAllTypes; - - private final List conflicts = new ArrayList<>(); - private final List newFieldMappers = new ArrayList<>(); - private final List newObjectMappers = new ArrayList<>(); - - public MergeResult(boolean simulate, boolean updateAllTypes) { - this.simulate = simulate; - this.updateAllTypes = updateAllTypes; - } - - public void addFieldMappers(Collection fieldMappers) { - assert simulate() == false; - newFieldMappers.addAll(fieldMappers); - } - - public void addObjectMappers(Collection objectMappers) { - assert simulate() == false; - newObjectMappers.addAll(objectMappers); - } - - public Collection getNewFieldMappers() { - return newFieldMappers; - } - - public Collection getNewObjectMappers() { - return newObjectMappers; - } - - public boolean simulate() { - return simulate; - } - - public boolean updateAllTypes() { - return updateAllTypes; - } - - public void addConflict(String mergeFailure) { - conflicts.add(mergeFailure); - } - - public boolean hasConflicts() { - return conflicts.isEmpty() == false; - } - - public String[] buildConflicts() { - return conflicts.toArray(Strings.EMPTY_ARRAY); - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index fc6d1fa9e1a..2f3b40126ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -70,4 +70,8 @@ public abstract class MetadataFieldMapper extends FieldMapper { */ public abstract void postParse(ParseContext context) throws IOException; + @Override + public MetadataFieldMapper merge(Mapper mergeWith, boolean updateAllTypes) { + return 
(MetadataFieldMapper) super.merge(mergeWith, updateAllTypes); + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index edf75621c1e..0a88e29c8d6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -595,7 +595,7 @@ public abstract class ParseContext { if (dynamicMappingsUpdate == null) { dynamicMappingsUpdate = mapper; } else { - MapperUtils.merge(dynamicMappingsUpdate, mapper); + dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java index ed8314c6f7d..aa35e699b2d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java @@ -128,7 +128,7 @@ public class ParsedDocument { if (dynamicMappingsUpdate == null) { dynamicMappingsUpdate = update; } else { - MapperUtils.merge(dynamicMappingsUpdate, update); + dynamicMappingsUpdate = dynamicMappingsUpdate.merge(update, false); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 61b22a1ee26..44b4cbcd35e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -77,8 +77,7 @@ public class ByteFieldMapper extends NumberFieldMapper { setupFieldType(context); ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (ByteFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 5b4df635a34..9d465b4cffc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -605,11 +605,9 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith; - if (!mergeResult.simulate()) { - this.maxInputLength = fieldMergeWith.maxInputLength; - } + this.maxInputLength = fieldMergeWith.maxInputLength; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 27b96b27a44..7a99e6b50c0 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -123,8 +123,7 @@ public class DateFieldMapper extends NumberFieldMapper { fieldType.setNullValue(nullValue); DateFieldMapper fieldMapper = new 
DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (DateFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 0e512bf4281..861d33e560e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -80,8 +80,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { setupFieldType(context); DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (DoubleFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 9a607ffd415..ad88c745dfd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -81,8 +81,7 @@ public class FloatFieldMapper extends NumberFieldMapper { setupFieldType(context); FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (FloatFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 868cfeb4380..18995498113 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -85,8 +85,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (IntegerFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index 4130c902586..9d9557c41f4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -84,8 +84,7 @@ public class LongFieldMapper extends NumberFieldMapper { setupFieldType(context); LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (LongFieldMapper) 
fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 87a63de99ec..04dd1a21335 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -183,22 +183,41 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM } @Override - public void includeInAll(Boolean includeInAll) { + protected NumberFieldMapper clone() { + return (NumberFieldMapper) super.clone(); + } + + @Override + public Mapper includeInAll(Boolean includeInAll) { if (includeInAll != null) { - this.includeInAll = includeInAll; + NumberFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void includeInAllIfNotSet(Boolean includeInAll) { + public Mapper includeInAllIfNotSet(Boolean includeInAll) { if (includeInAll != null && this.includeInAll == null) { - this.includeInAll = includeInAll; + NumberFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void unsetIncludeInAll() { - includeInAll = null; + public Mapper unsetIncludeInAll() { + if (includeInAll != null) { + NumberFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } } @Override @@ -254,21 +273,16 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith; - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - this.includeInAll = nfmMergeWith.includeInAll; - if (nfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = nfmMergeWith.ignoreMalformed; - } - if (nfmMergeWith.coerce.explicit()) { - this.coerce = nfmMergeWith.coerce; - } + this.includeInAll = nfmMergeWith.includeInAll; + if (nfmMergeWith.ignoreMalformed.explicit()) { + this.ignoreMalformed = nfmMergeWith.ignoreMalformed; + } + if (nfmMergeWith.coerce.explicit()) { + this.coerce = nfmMergeWith.coerce; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index 81ed6cc3bac..e455959c530 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -81,8 +81,7 @@ public class ShortFieldMapper extends NumberFieldMapper { ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (ShortFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 
0a921ad85eb..061d3a2e343 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; @@ -150,8 +149,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc StringFieldMapper fieldMapper = new StringFieldMapper( name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return fieldMapper.includeInAll(includeInAll); } } @@ -257,22 +255,41 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } @Override - public void includeInAll(Boolean includeInAll) { + protected StringFieldMapper clone() { + return (StringFieldMapper) super.clone(); + } + + @Override + public StringFieldMapper includeInAll(Boolean includeInAll) { if (includeInAll != null) { - this.includeInAll = includeInAll; + StringFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void includeInAllIfNotSet(Boolean includeInAll) { + public StringFieldMapper includeInAllIfNotSet(Boolean includeInAll) { if (includeInAll != null && this.includeInAll == null) { - this.includeInAll = includeInAll; + StringFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void unsetIncludeInAll() { - includeInAll = null; + public StringFieldMapper unsetIncludeInAll() { + if (includeInAll != null) { + StringFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } } @Override @@ -359,15 +376,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - if (!mergeResult.simulate()) { - this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; - this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; + this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index 8348892e44a..a485c3727fc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; 
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; @@ -81,8 +80,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), analyzer, multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (TokenCountFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -190,14 +188,9 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - if (!mergeResult.simulate()) { - this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 0b57d866ddd..2b1d091be3b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; @@ -388,17 +387,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); BaseGeoPointFieldMapper gpfmMergeWith = (BaseGeoPointFieldMapper) mergeWith; - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gpfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = gpfmMergeWith.ignoreMalformed; - } + if (gpfmMergeWith.ignoreMalformed.explicit()) { + this.ignoreMalformed = gpfmMergeWith.ignoreMalformed; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 84e6bde07ac..7e5a8738384 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -39,7 +39,6 @@ import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import 
org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; @@ -297,23 +296,18 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith; if (gpfmMergeWith.coerce.explicit()) { if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) { - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]"); + throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] has different [coerce]"); } } - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gpfmMergeWith.coerce.explicit()) { - this.coerce = gpfmMergeWith.coerce; - } + if (gpfmMergeWith.coerce.explicit()) { + this.coerce = gpfmMergeWith.coerce; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 71b6d89610f..7c100a306c2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; @@ -475,17 +474,12 @@ public class GeoShapeFieldMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith; - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gsfm.coerce.explicit()) { - this.coerce = gsfm.coerce; - } + if (gsfm.coerce.explicit()) { + this.coerce = gsfm.coerce; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 645c36a4855..4676c63e793 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; @@ -58,11 +57,24 @@ public class AllFieldMapper extends MetadataFieldMapper { public interface IncludeInAll { - void 
includeInAll(Boolean includeInAll); + /** + * If {@code includeInAll} is not null then return a copy of this mapper + * that will include values in the _all field according to {@code includeInAll}. + */ + Mapper includeInAll(Boolean includeInAll); - void includeInAllIfNotSet(Boolean includeInAll); + /** + * If {@code includeInAll} is not null and not set on this mapper yet, then + * return a copy of this mapper that will include values in the _all field + * according to {@code includeInAll}. + */ + Mapper includeInAllIfNotSet(Boolean includeInAll); - void unsetIncludeInAll(); + /** + * If {@code includeInAll} was already set on this mapper then return a copy + * of this mapper that has {@code includeInAll} not set. + */ + Mapper unsetIncludeInAll(); } public static final String NAME = "_all"; @@ -309,11 +321,11 @@ public class AllFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) { - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); + throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); } - super.merge(mergeWith, mergeResult); + super.doMerge(mergeWith, updateAllTypes); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 16b6c4c56da..a0b7cddae76 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -331,7 +330,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 962332b5c4b..167807f3b2a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; @@ 
-279,12 +278,10 @@ public class IndexFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith; - if (!mergeResult.simulate()) { - if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = indexFieldMapperMergeWith.enabledState; - } + if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = indexFieldMapperMergeWith.enabledState; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 760259a1802..6142bf475ec 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -371,11 +370,11 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; if (Objects.equals(parentType, fieldMergeWith.parentType) == false) { - mergeResult.addConflict("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]"); + throw new IllegalArgumentException("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]"); } List conflicts = new ArrayList<>(); @@ -383,13 +382,13 @@ public class ParentFieldMapper extends MetadataFieldMapper { parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here if (childJoinFieldType != null) { // TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type. 
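The hunks that follow replace MergeResult bookkeeping with a single pattern, used by every metadata mapper touched in this patch: gather all incompatibilities into a list, then reject the whole merge with one exception so the operation stays all-or-nothing. A minimal sketch of that shape, with hypothetical flags standing in for the real compatibility checks:

    import java.util.ArrayList;
    import java.util.List;

    class ConflictSketch {
        // Not code from this patch: a stripped-down doMerge showing the
        // collect-then-throw conflict handling the mappers below share.
        static void failOnConflicts(boolean enabledDiffers, boolean formatDiffers) {
            List<String> conflicts = new ArrayList<>();
            if (enabledDiffers) {
                conflicts.add("mapper [field] has different [enabled] values");
            }
            if (formatDiffers) {
                conflicts.add("mapper [field] has different [format] values");
            }
            if (conflicts.isEmpty() == false) {
                // one exception reports every conflict, and since nothing has
                // been mutated yet the rejected merge leaves the mapper intact
                throw new IllegalArgumentException("Merge conflicts: " + conflicts);
            }
        }
    }
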
- childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, mergeResult.updateAllTypes() == false); + childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false); } - for (String conflict : conflicts) { - mergeResult.addConflict(conflict); + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Merge conflicts: " + conflicts); } - if (active() && mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { + if (active()) { childJoinFieldType = fieldMergeWith.childJoinFieldType.clone(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 18d0645d2d5..e791ad376c3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -249,7 +248,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index f9bcb31b406..4d47c3bf446 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -41,11 +41,11 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; @@ -310,18 +310,20 @@ public class SourceFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith; - if (mergeResult.simulate()) { - if (this.enabled != sourceMergeWith.enabled) { - mergeResult.addConflict("Cannot update enabled setting for [_source]"); - } - if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) { - mergeResult.addConflict("Cannot update includes setting for [_source]"); - } - if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) { - mergeResult.addConflict("Cannot update excludes setting for [_source]"); - } + List conflicts = new ArrayList<>(); + if (this.enabled != sourceMergeWith.enabled) { + conflicts.add("Cannot update enabled setting for [_source]"); + } + if 
(Arrays.equals(includes(), sourceMergeWith.includes()) == false) { + conflicts.add("Cannot update includes setting for [_source]"); + } + if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) { + conflicts.add("Cannot update excludes setting for [_source]"); + } + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 9a18befe622..7a17e56e7dd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.SourceToParse; @@ -258,21 +257,19 @@ public class TTLFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; - if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with - if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) { - mergeResult.addConflict("_ttl cannot be disabled once it was enabled."); + if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with + if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) { + throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled."); } else { - if (!mergeResult.simulate()) { - this.enabledState = ttlMergeWith.enabledState; - } + this.enabledState = ttlMergeWith.enabledState; } } if (ttlMergeWith.defaultTTL != -1) { // we never build the default when the field is disabled so we should also not set it // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster) - if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) { + if (enabledState == EnabledAttributeMapper.ENABLED) { this.defaultTTL = ttlMergeWith.defaultTTL; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 468243d63cf..0657d67857b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -33,13 +33,13 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import 
org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -379,31 +379,32 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith; - super.merge(mergeWith, mergeResult); - if (!mergeResult.simulate()) { - if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = timestampFieldMapperMergeWith.enabledState; - } - } else { - if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { - return; - } - if (defaultTimestamp == null) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null"); - } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } - if (this.path != null) { - if (path.equals(timestampFieldMapperMergeWith.path()) == false) { - mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path())); - } - } else if (timestampFieldMapperMergeWith.path() != null) { - mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing"); + super.doMerge(mergeWith, updateAllTypes); + if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = timestampFieldMapperMergeWith.enabledState; + } + if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { + return; + } + List<String> conflicts = new ArrayList<>(); + if (defaultTimestamp == null) { + conflicts.add("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); + } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { + conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering null"); + } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { + conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); + } + if (this.path != null) { + if (path.equals(timestampFieldMapperMergeWith.path()) == false) { + conflicts.add("Cannot update path in _timestamp value. 
Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path())); } + } else if (timestampFieldMapperMergeWith.path() != null) { + conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing"); + } + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Conflicts: " + conflicts); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index d4acc3c5975..a140593943f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -225,7 +224,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index ef4c48e62e3..1cf3b9d9ac3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -225,7 +224,7 @@ public class UidFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 292a622ab73..d9659f40c22 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -166,7 +165,7 
@@ public class VersionFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // nothing to do } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index e57ceaf8ca8..d8a7c752e6f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -122,8 +122,7 @@ public class IpFieldMapper extends NumberFieldMapper { setupFieldType(context); IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (IpFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 88f89719050..cbbc8563576 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -160,7 +160,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, context.path().remove(); ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers, context.indexSettings()); - objectMapper.includeInAllIfNotSet(includeInAll); + objectMapper = objectMapper.includeInAllIfNotSet(includeInAll); return (Y) objectMapper; } @@ -389,41 +389,53 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } @Override - public void includeInAll(Boolean includeInAll) { + public ObjectMapper includeInAll(Boolean includeInAll) { if (includeInAll == null) { - return; + return this; } - this.includeInAll = includeInAll; + + ObjectMapper clone = clone(); + clone.includeInAll = includeInAll; // when called from outside, apply this on all the inner mappers - for (Mapper mapper : mappers.values()) { + for (Mapper mapper : clone.mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll)); } } + return clone; } @Override - public void includeInAllIfNotSet(Boolean includeInAll) { - if (this.includeInAll == null) { - this.includeInAll = includeInAll; + public ObjectMapper includeInAllIfNotSet(Boolean includeInAll) { + if (includeInAll == null || this.includeInAll != null) { + return this; } + + ObjectMapper clone = clone(); + clone.includeInAll = includeInAll; // when called from outside, apply this on all the inner mappers - for (Mapper mapper : mappers.values()) { + for (Mapper mapper : clone.mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll)); } } + return clone; } @Override - public void unsetIncludeInAll() { - includeInAll = null; + public ObjectMapper unsetIncludeInAll() { + if (includeInAll == null) { + return this; + } + ObjectMapper clone = clone(); + 
clone.includeInAll = null; // when called from outside, apply this on all the inner mappers for (Mapper mapper : mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll()); } } + return clone; } public Nested nested() { @@ -434,14 +446,9 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return this.nestedTypeFilter; } - /** - * Put a new mapper. - * NOTE: this method must be called under the current {@link DocumentMapper} - * lock if concurrent updates are expected. - */ - public void putMapper(Mapper mapper) { + protected void putMapper(Mapper mapper) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); + mapper = ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); } mappers = mappers.copyAndPut(mapper.simpleName(), mapper); } @@ -464,64 +471,43 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } @Override - public void merge(final Mapper mergeWith, final MergeResult mergeResult) { + public ObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) { if (!(mergeWith instanceof ObjectMapper)) { - mergeResult.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]"); - return; + throw new IllegalArgumentException("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]"); } ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; - - if (nested().isNested()) { - if (!mergeWithObject.nested().isNested()) { - mergeResult.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested"); - return; - } - } else { - if (mergeWithObject.nested().isNested()) { - mergeResult.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested"); - return; - } - } - - if (!mergeResult.simulate()) { - if (mergeWithObject.dynamic != null) { - this.dynamic = mergeWithObject.dynamic; - } - } - - doMerge(mergeWithObject, mergeResult); - - List mappersToPut = new ArrayList<>(); - List newObjectMappers = new ArrayList<>(); - List newFieldMappers = new ArrayList<>(); - for (Mapper mapper : mergeWithObject) { - Mapper mergeWithMapper = mapper; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); - if (mergeIntoMapper == null) { - // no mapping, simply add it if not simulating - if (!mergeResult.simulate()) { - mappersToPut.add(mergeWithMapper); - MapperUtils.collect(mergeWithMapper, newObjectMappers, newFieldMappers); - } - } else if (mergeIntoMapper instanceof MetadataFieldMapper == false) { - // root mappers can only exist here for backcompat, and are merged in Mapping - mergeIntoMapper.merge(mergeWithMapper, mergeResult); - } - } - if (!newFieldMappers.isEmpty()) { - mergeResult.addFieldMappers(newFieldMappers); - } - if (!newObjectMappers.isEmpty()) { - mergeResult.addObjectMappers(newObjectMappers); - } - // add the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock) - for (Mapper mapper : mappersToPut) { - putMapper(mapper); - } + ObjectMapper merged = clone(); + merged.doMerge(mergeWithObject, updateAllTypes); + return merged; } - protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) { + protected void doMerge(final ObjectMapper 
mergeWith, boolean updateAllTypes) { + if (nested().isNested()) { + if (!mergeWith.nested().isNested()) { + throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from nested to non-nested"); + } + } else { + if (mergeWith.nested().isNested()) { + throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from non-nested to nested"); + } + } + if (mergeWith.dynamic != null) { + this.dynamic = mergeWith.dynamic; + } + + for (Mapper mergeWithMapper : mergeWith) { + Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); + Mapper merged; + if (mergeIntoMapper == null) { + // no mapping, simply add it + merged = mergeWithMapper; + } else { + // root mappers can only exist here for backcompat, and are merged in Mapping + merged = mergeIntoMapper.merge(mergeWithMapper, updateAllTypes); + } + putMapper(merged); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index a0c989abd7d..c6c64e432b7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -205,6 +205,14 @@ public class RootObjectMapper extends ObjectMapper { this.numericDetection = numericDetection; } + /** Return a copy of this mapper that has the given {@code mapper} as a + * sub mapper. */ + public RootObjectMapper copyAndPutMapper(Mapper mapper) { + RootObjectMapper clone = (RootObjectMapper) clone(); + clone.putMapper(mapper); + return clone; + } + @Override public ObjectMapper mappingUpdate(Mapper mapper) { RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper); @@ -253,25 +261,29 @@ public class RootObjectMapper extends ObjectMapper { } @Override - protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) { + public RootObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) { + return (RootObjectMapper) super.merge(mergeWith, updateAllTypes); + } + + @Override + protected void doMerge(ObjectMapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; - if (!mergeResult.simulate()) { - // merge them - List mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); - for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { - boolean replaced = false; - for (int i = 0; i < mergedTemplates.size(); i++) { - if (mergedTemplates.get(i).name().equals(template.name())) { - mergedTemplates.set(i, template); - replaced = true; - } - } - if (!replaced) { - mergedTemplates.add(template); + // merge them + List mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); + for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { + boolean replaced = false; + for (int i = 0; i < mergedTemplates.size(); i++) { + if (mergedTemplates.get(i).name().equals(template.name())) { + mergedTemplates.set(i, template); + replaced = true; } } - this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); + if (!replaced) { + mergedTemplates.add(template); + } } + this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java 
b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index ac46f6725de..88e55600bc9 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -110,7 +110,7 @@ public class TranslogRecoveryPerformer { if (currentUpdate == null) { recoveredTypes.put(type, update); } else { - MapperUtils.merge(currentUpdate, update); + currentUpdate = currentUpdate.merge(update, false); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index d94ae2b6735..2fe0cf9f218 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; @@ -39,7 +38,6 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -321,9 +319,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { DocumentMapper docMapperAfter = parser.parse(mappingAfter); - MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true, false); - - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + docMapperBefore.merge(docMapperAfter.mapping(), true, false); docMapperBefore.merge(docMapperAfter.mapping(), false, false); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index ba9303e8b58..1cb41480cb7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.*; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -60,13 +59,11 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapper stage2 = parser.parse(stage2Mapping); - MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); + stage1.merge(stage2.mapping(), true, false); // Just simulated so merge hasn't happened yet assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); - mergeResult = stage1.merge(stage2.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); + stage1.merge(stage2.mapping(), false, 
false); // Just simulated so merge hasn't happened yet assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index fb67401e334..4772958bdb7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -371,9 +371,8 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { Map config = getConfigurationViaXContent(initialDateFieldMapper); assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); - MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false, false); + defaultMapper.merge(mergeMapper.mapping(), false, false); - assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false)); assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class))); DateFieldMapper mergedFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index e5d08db8d9f..d07bf21a4be 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; @@ -219,7 +218,7 @@ public class ExternalMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // ignore this for now } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java index dae8bc67fda..2731e30a84e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; @@ -66,9 +65,9 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + public void doMerge(Mapper mergeWith, boolean updateAllTypes) { if (!(mergeWith 
instanceof ExternalMetadataMapper)) { - mergeResult.addConflict("Trying to merge " + mergeWith + " with " + this); + throw new IllegalArgumentException("Trying to merge " + mergeWith + " with " + this); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 93fd71599c4..4efa12fca00 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index 54e9e96f8ad..596efdcc273 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -30,17 +30,13 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.isIn; public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { public void testDefaultConfiguration() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 1a66879c448..b2faf44e657 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; @@ -39,6 +38,7 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -59,15 +59,12 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); 
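The test updates below all follow one calling convention: a merge that used to return a MergeResult is now invoked purely for its side effects, and an expected conflict becomes an expected IllegalArgumentException. A sketch of the try/fail/catch shape the converted tests use, assuming a hypothetical helper rather than quoting any one test:

    class MergeAssertSketch {
        // Hypothetical helper, not part of the patch: run a merge that is
        // expected to be rejected and verify the conflict message.
        static void expectMergeConflict(Runnable merge, String expectedConflict) {
            try {
                merge.run(); // e.g. () -> docMapper.merge(update.mapping(), true, false)
                throw new AssertionError("expected the merge to be rejected");
            } catch (IllegalArgumentException e) {
                if (e.getMessage().contains(expectedConflict) == false) {
                    throw new AssertionError("unexpected conflict: " + e.getMessage(), e);
                }
            }
        }
    }
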
DocumentMapper stage2 = parser.parse(stage2Mapping); - MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); + stage1.merge(stage2.mapping(), true, false); // since we are simulating, we should not have the age mapping assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue()); assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue()); // now merge, don't simulate - mergeResult = stage1.merge(stage2.mapping(), false, false); - // there is still merge failures - assertThat(mergeResult.hasConflicts(), equalTo(false)); + stage1.merge(stage2.mapping(), false, false); // but we have the age in assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue()); assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue()); @@ -83,8 +80,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping); assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); - MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); + mapper.merge(withDynamicMapper.mapping(), false, false); assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); } @@ -99,14 +95,19 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); DocumentMapper nestedMapper = parser.parse(nestedMapping); - MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true, false); - assertThat(mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.buildConflicts().length, equalTo(1)); - assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested")); + try { + objectMapper.merge(nestedMapper.mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from non-nested to nested")); + } - mergeResult = nestedMapper.merge(objectMapper.mapping(), true, false); - assertThat(mergeResult.buildConflicts().length, equalTo(1)); - assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested")); + try { + nestedMapper.merge(objectMapper.mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from nested to non-nested")); + } } public void testMergeSearchAnalyzer() throws Exception { @@ -122,9 +123,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper changed = parser.parse(mapping2); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - MergeResult mergeResult = existing.merge(changed.mapping(), false, false); + existing.merge(changed.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); } @@ -141,9 +141,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper changed = parser.parse(mapping2); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - MergeResult mergeResult = 
existing.merge(changed.mapping(), false, false); + existing.merge(changed.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 30890dcd22a..83e10bd826c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -27,15 +27,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Arrays; - import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -62,8 +58,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); DocumentMapper docMapper2 = parser.parse(mapping); - MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + docMapper.merge(docMapper2.mapping(), true, false); docMapper.merge(docMapper2.mapping(), false, false); @@ -84,8 +79,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); DocumentMapper docMapper3 = parser.parse(mapping); - mergeResult = docMapper.merge(docMapper3.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + docMapper.merge(docMapper3.mapping(), true, false); docMapper.merge(docMapper3.mapping(), false, false); @@ -100,8 +94,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); DocumentMapper docMapper4 = parser.parse(mapping); - mergeResult = docMapper.merge(docMapper4.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + docMapper.merge(docMapper4.mapping(), true, false); docMapper.merge(docMapper4.mapping(), false, false); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 364e9f2063f..c30ea9bc6c6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -34,11 +34,9 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class DefaultSourceMappingTests extends ESSingleNodeTestCase { @@ -194,13 +192,18 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException { DocumentMapper docMapper = parser.parse(mapping1); docMapper = parser.parse(docMapper.mappingSource().string()); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false); - - List expectedConflicts = new ArrayList<>(Arrays.asList(conflicts)); - for (String conflict : mergeResult.buildConflicts()) { - assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict)); + if (conflicts.length == 0) { + docMapper.merge(parser.parse(mapping2).mapping(), true, false); + } else { + try { + docMapper.merge(parser.parse(mapping2).mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + for (String conflict : conflicts) { + assertThat(e.getMessage(), containsString(conflict)); + } + } } - assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); } public void testEnabledNotUpdateable() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 9ac039a49fb..cadd9dd673c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -493,8 +492,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false); - assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts()); + defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 53a3bf7bb6e..d545452db0f 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -515,8 +514,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() .endObject().endObject().string(); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false, false); - assertThat(mergeResult.buildConflicts().length, equalTo(0)); + docMapper.merge(parser.parse(mapping).mapping(), false, false); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); } @@ -618,9 +616,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .field("index", indexValues.remove(randomInt(2))) .endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService(); - DocumentMapper docMapper = parser.parse(mapping); + mapperService.merge("type", new CompressedXContent(mapping), true, false); mapping = XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("_timestamp") @@ -628,18 +626,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false); - List expectedConflicts = new ArrayList<>(); - expectedConflicts.add("mapper [_timestamp] has different [index] values"); - expectedConflicts.add("mapper [_timestamp] has different [tokenize] values"); - if (indexValues.get(0).equals("not_analyzed") == false) { - // if the only index value left is not_analyzed, then the doc values setting will be the same, but in the - // other two cases, it will change - expectedConflicts.add("mapper [_timestamp] has different [doc_values] values"); - } - - for (String conflict : mergeResult.buildConflicts()) { - assertThat(conflict, isIn(expectedConflicts)); + try { + mapperService.merge("type", new CompressedXContent(mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values")); } } @@ -686,10 +677,15 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException { DocumentMapper docMapper = parser.parse(mapping1); docMapper = parser.parse(docMapper.mappingSource().string()); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false); - 
assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0 : 1)); - if (conflict != null) { - assertThat(mergeResult.buildConflicts()[0], containsString(conflict)); + if (conflict == null) { + docMapper.merge(parser.parse(mapping2).mapping(), true, false); + } else { + try { + docMapper.merge(parser.parse(mapping2).mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString(conflict)); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index efe07615532..444d692079a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; @@ -116,9 +115,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl); DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl); - MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false); + mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true)); } @@ -141,9 +139,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { DocumentMapper initialMapper = parser.parse(mappingWithTtl); DocumentMapper updatedMapper = parser.parse(updatedMapping); - MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false); + initialMapper.merge(updatedMapper.mapping(), true, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); } @@ -154,9 +151,13 @@ public class TTLMappingTests extends ESSingleNodeTestCase { DocumentMapper initialMapper = parser.parse(mappingWithTtl); DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled); - MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false); + try { + initialMapper.merge(updatedMapper.mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + } - assertThat(mergeResult.hasConflicts(), equalTo(true)); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); } @@ -189,23 +190,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new 
CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false); } public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false); } public void testMergeWithOnlyDefaultSet() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -216,8 +214,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -228,8 +225,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl); CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false); // make sure simulate flag actually worked - no mappings applied CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); @@ -240,8 +236,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled(); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); @@ -252,8 +247,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); @@ -263,8 +257,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { mappingWithoutTtl = getMappingWithTtlDisabled("6d"); indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); 
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); @@ -273,8 +266,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { // check if switching simulate flag off works if nothing was applied in the beginning indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index abf5f4819cd..e843088c545 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -77,9 +76,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false); - // assure we have no conflicts - assertThat(mergeResult.buildConflicts().length, equalTo(0)); + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false); // make sure mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); @@ -101,9 +98,12 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false); - // assure we have conflicts - assertThat(mergeResult.buildConflicts().length, equalTo(1)); + try { + indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + } // make sure simulate flag actually worked - no mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 4be2b36fbe6..63c142f1e74 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -1176,7 +1176,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .endObject().endObject()).get(); fail(); } catch (IllegalArgumentException e) { - assertThat(e.toString(), containsString("Merge failed with failures {[The _parent field's type option can't be changed: [null]->[parent]")); + assertThat(e.toString(), containsString("The _parent field's type option can't be changed: [null]->[parent]")); } } diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index eb0e143c946..0ba636db72b 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -602,7 +602,7 @@ public class AttachmentMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // ignore this for now } diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 60c31c3f765..03b00d2ac39 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -66,8 +66,7 @@ public class Murmur3FieldMapper extends LongFieldMapper { Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (Murmur3FieldMapper) fieldMapper.includeInAll(includeInAll); } @Override diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index aaf46553a75..fb5d47bdf69 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ 
b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; @@ -177,12 +176,10 @@ public class SizeFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith; - if (!mergeResult.simulate()) { - if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = sizeFieldMapperMergeWith.enabledState; - } + if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = sizeFieldMapperMergeWith.enabledState; } } } From 5d5c6591aac41fe32b7e26d66756f466d4746d10 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 4 Dec 2015 16:40:37 +0100 Subject: [PATCH 035/322] Validate that fields are defined only once. There are two ways that a field can be defined twice: - by reusing the name of a meta mapper in the root object (`_id`, `_routing`, etc.) - by defining a sub-field both explicitly in the mapping and through the code in a field mapper (like ExternalMapper does) This commit adds new checks in order to make sure this never happens. 
Close #15057 --- .../index/mapper/MapperService.java | 47 +++++++++++++++++-- .../elasticsearch/index/mapper/Mapping.java | 8 +++- .../ExternalValuesMapperIntegrationIT.java | 4 +- .../mapper/update/UpdateMappingTests.java | 45 ++++++++++++++++++ .../messy/tests/SearchFieldsTests.java | 6 +-- 5 files changed, 98 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 938f610d6db..3a38c465d3d 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; @@ -92,7 +91,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock()); private volatile FieldTypeLookup fieldTypes; - private volatile ImmutableOpenMap fullPathObjectMappers = ImmutableOpenMap.of(); + private volatile Map fullPathObjectMappers = new HashMap<>(); private boolean hasNested = false; // updated dynamically to true when a nested object is added private final DocumentMapperParser documentParser; @@ -300,8 +299,41 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return true; } + private void checkFieldUniqueness(String type, Collection objectMappers, Collection fieldMappers) { + final Set objectFullNames = new HashSet<>(); + for (ObjectMapper objectMapper : objectMappers) { + final String fullPath = objectMapper.fullPath(); + if (objectFullNames.add(fullPath) == false) { + throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); + } + } + + if (indexSettings.getIndexVersionCreated().before(Version.V_3_0_0)) { + // Before 3.0 some metadata mappers are also registered under the root object mapper + // So we avoid false positives by deduplicating mappers + // given that we check exact equality, this would still catch the case that a mapper + // is defined under the root object + Collection uniqueFieldMappers = Collections.newSetFromMap(new IdentityHashMap<>()); + uniqueFieldMappers.addAll(fieldMappers); + fieldMappers = uniqueFieldMappers; + } + + final Set fieldNames = new HashSet<>(); + for (FieldMapper fieldMapper : fieldMappers) { + final String name = fieldMapper.name(); + if (objectFullNames.contains(name)) { + throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); + } else if (fieldNames.add(name) == false) { + throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); + } + } + } + protected void checkMappersCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { assert mappingLock.isWriteLockedByCurrentThread(); + + checkFieldUniqueness(type, objectMappers, fieldMappers); + for (ObjectMapper newObjectMapper : objectMappers) { ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); if 
(existingObjectMapper != null) { @@ -313,6 +345,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } } + + for (FieldMapper fieldMapper : fieldMappers) { + if (fullPathObjectMappers.containsKey(fieldMapper.name())) { + throw new IllegalArgumentException("Field [{}] is defined as a field in mapping [" + fieldMapper.name() + "] but this name is already used for an object in other types"); + } + } + fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes); } @@ -330,14 +369,14 @@ public class MapperService extends AbstractIndexComponent implements Closeable { protected void addMappers(String type, Collection objectMappers, Collection fieldMappers) { assert mappingLock.isWriteLockedByCurrentThread(); - ImmutableOpenMap.Builder fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers); + Map fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); for (ObjectMapper objectMapper : objectMappers) { fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); if (objectMapper.nested().isNested()) { hasNested = true; } } - this.fullPathObjectMappers = fullPathObjectMappers.build(); + this.fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java index bac42162552..a89171e4f29 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -27,10 +27,12 @@ import org.elasticsearch.index.mapper.object.RootObjectMapper; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.List; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -41,7 +43,9 @@ import static java.util.Collections.unmodifiableMap; */ public final class Mapping implements ToXContent { - public static final List LEGACY_INCLUDE_IN_OBJECT = Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl"); + // Set of fields that were included into the root object mapper before 2.0 + public static final Set LEGACY_INCLUDE_IN_OBJECT = Collections.unmodifiableSet(new HashSet<>( + Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl"))); final Version indexCreated; final RootObjectMapper root; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 4cf7b405217..7e519c3b722 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -87,7 +87,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .startObject("f") .field("type", ExternalMapperPlugin.EXTERNAL_UPPER) .startObject("fields") - .startObject("f") + .startObject("g") .field("type", "string") .field("store", "yes") .startObject("fields") @@ -107,7 +107,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { refresh(); SearchResponse response = 
client().prepareSearch("test-idx") - .setQuery(QueryBuilders.termQuery("f.f.raw", "FOO BAR")) + .setQuery(QueryBuilders.termQuery("f.g.raw", "FOO BAR")) .execute().actionGet(); assertThat(response.getHits().totalHits(), equalTo((long) 1)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index abf5f4819cd..fbcef46d4e6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -202,6 +202,51 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { assertNull(mapperService.documentMapper("type2").mapping().root().getMapper("foo")); } + public void testReuseMetaField() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("_id").field("type", "string").endObject() + .endObject().endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + } + + public void testReuseMetaFieldBackCompat() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("_id").field("type", "string").endObject() + .endObject().endObject().endObject(); + // the logic is different for 2.x indices since they record some meta mappers (including _id) + // in the root object + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build(); + MapperService mapperService = createIndex("test", settings).mapperService(); + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + } + public void testIndexFieldParsingBackcompat() throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); XContentBuilder indexMapping = XContentFactory.jsonBuilder(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 8153d207b7c..5a56e0f6999 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -392,8 +392,7 @@ public class SearchFieldsTests extends ESIntegTestCase { createIndex("test"); 
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("_source").field("enabled", false).endObject() + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("byte_field").field("type", "byte").field("store", "yes").endObject() .startObject("short_field").field("type", "short").field("store", "yes").endObject() .startObject("integer_field").field("type", "integer").field("store", "yes").endObject() @@ -556,8 +555,7 @@ public class SearchFieldsTests extends ESIntegTestCase { createIndex("test"); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("_source").field("enabled", false).endObject() + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("string_field").field("type", "string").endObject() .startObject("byte_field").field("type", "byte").endObject() .startObject("short_field").field("type", "short").endObject() From 061446b25ac21fe5b5352a4a1c8a4b042f81575b Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 15 Dec 2015 10:56:53 +0100 Subject: [PATCH 036/322] Merge pull request #15304 from cjohansen/patch-1 Fix typo --- docs/reference/mapping/types/nested.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 07f87037b07..cca87853d83 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -66,7 +66,7 @@ GET my_index/_search ==== Using `nested` fields for arrays of objects If you need to index arrays of objects and to maintain the independence of -each object in the array, you should used the `nested` datatype instead of the +each object in the array, you should use the `nested` datatype instead of the <> datatype. Internally, nested objects index each object in the array as a separate hidden document, meaning that each nested object can be queried independently of the others, with the <>: From 76713611e9d62eb9e53a403077ecfbcc907a32d3 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 11 Dec 2015 15:54:52 +0100 Subject: [PATCH 037/322] Format the term with the formatter in LongTerms.Bucket.getKeyAsString in order to be consistent with the json response field `key_as_string` of the terms aggregation. 
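For example, for a terms aggregation on a boolean field (an illustrative sketch; `terms` is assumed to be the LongTerms result, as in the updated BooleanTermsIT below):

    // keys previously came back as the raw long value, e.g. "1";
    // they now go through the field's formatter, matching key_as_string
    Terms.Bucket bucket = terms.getBucketByKey("true");
    assertEquals("true", bucket.getKeyAsString());

Likewise, date-valued keys such as 1414800000000 are now rendered as "2014-11-01T00:00:00.000Z".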
--- .../search/aggregations/bucket/terms/LongTerms.java | 2 +- .../search/aggregations/bucket/BooleanTermsIT.java | 12 ++++++++---- .../search/aggregations/bucket/NestedIT.java | 8 ++++---- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index 26c2eee2f6b..c270517cd9d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -91,7 +91,7 @@ public class LongTerms extends InternalTerms { @Override public String getKeyAsString() { - return String.valueOf(term); + return formatter.format(term); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java index 0a660b85374..aad2c9bb3ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java @@ -99,20 +99,22 @@ public class BooleanTermsIT extends ESIntegTestCase { final int bucketCount = numSingleFalses > 0 && numSingleTrues > 0 ? 2 : numSingleFalses + numSingleTrues > 0 ? 1 : 0; assertThat(terms.getBuckets().size(), equalTo(bucketCount)); - Terms.Bucket bucket = terms.getBucketByKey("0"); + Terms.Bucket bucket = terms.getBucketByKey("false"); if (numSingleFalses == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numSingleFalses, bucket.getDocCount()); + assertEquals("false", bucket.getKeyAsString()); } - bucket = terms.getBucketByKey("1"); + bucket = terms.getBucketByKey("true"); if (numSingleTrues == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numSingleTrues, bucket.getDocCount()); + assertEquals("true", bucket.getKeyAsString()); } } @@ -131,20 +133,22 @@ public class BooleanTermsIT extends ESIntegTestCase { final int bucketCount = numMultiFalses > 0 && numMultiTrues > 0 ? 2 : numMultiFalses + numMultiTrues > 0 ? 
1 : 0; assertThat(terms.getBuckets().size(), equalTo(bucketCount)); - Terms.Bucket bucket = terms.getBucketByKey("0"); + Terms.Bucket bucket = terms.getBucketByKey("false"); if (numMultiFalses == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numMultiFalses, bucket.getDocCount()); + assertEquals("false", bucket.getKeyAsString()); } - bucket = terms.getBucketByKey("1"); + bucket = terms.getBucketByKey("true"); if (numMultiTrues == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numMultiTrues, bucket.getDocCount()); + assertEquals("true", bucket.getKeyAsString()); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index a1f4b20dc1c..349b61fc37e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -423,10 +423,10 @@ public class NestedIT extends ESIntegTestCase { Terms startDate = response.getAggregations().get("startDate"); assertThat(startDate.getBuckets().size(), equalTo(2)); - Terms.Bucket bucket = startDate.getBucketByKey("1414800000000"); // 2014-11-01T00:00:00.000Z + Terms.Bucket bucket = startDate.getBucketByKey("2014-11-01T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); Terms endDate = bucket.getAggregations().get("endDate"); - bucket = endDate.getBucketByKey("1417305600000"); // 2014-11-30T00:00:00.000Z + bucket = endDate.getBucketByKey("2014-11-30T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); Terms period = bucket.getAggregations().get("period"); bucket = period.getBucketByKey("2014-11"); @@ -440,10 +440,10 @@ public class NestedIT extends ESIntegTestCase { Terms tags = nestedTags.getAggregations().get("tag"); assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty - bucket = startDate.getBucketByKey("1417392000000"); // 2014-12-01T00:00:00.000Z + bucket = startDate.getBucketByKey("2014-12-01T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); endDate = bucket.getAggregations().get("endDate"); - bucket = endDate.getBucketByKey("1419984000000"); // 2014-12-31T00:00:00.000Z + bucket = endDate.getBucketByKey("2014-12-31T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); period = bucket.getAggregations().get("period"); bucket = period.getBucketByKey("2014-12"); From 82eb498b29c8372801fc013f041c4145e57752cb Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 15 Dec 2015 12:27:20 +0100 Subject: [PATCH 038/322] Docs: Updated plugin author help for Gradle Relates to #15280 --- docs/plugins/authors.asciidoc | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index 75b7776ec09..9461ba8dd53 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -17,7 +17,7 @@ listed in this documentation for inspiration. ==================================== The example site plugin mentioned above contains all of the scaffolding needed -for integrating with Maven builds. If you don't plan on using Maven, then all +for integrating with Gradle builds. If you don't plan on using Gradle, then all you really need in your plugin is: * The `plugin-descriptor.properties` file @@ -33,14 +33,14 @@ All plugins, be they site or Java plugins, must contain a file called `plugin-descriptor.properties` in the root directory. 
The format for this file is described in detail here: -https://github.com/elastic/elasticsearch/blob/master/dev-tools/src/main/resources/plugin-metadata/plugin-descriptor.properties[`dev-tools/src/main/resources/plugin-metadata/plugin-descriptor.properties`]. +https://github.com/elastic/elasticsearch/blob/master/buildSrc/src/main/resources/plugin-descriptor.properties[`/buildSrc/src/main/resources/plugin-descriptor.properties`]. Either fill in this template yourself (see https://github.com/lmenezes/elasticsearch-kopf/blob/master/plugin-descriptor.properties[elasticsearch-kopf] -as an example) or, if you are using Elasticsearch's Maven build system, you -can fill in the necessary values in the `pom.xml` for your plugin. For +as an example) or, if you are using Elasticsearch's Gradle build system, you +can fill in the necessary values in the `build.gradle` file for your plugin. For instance, see -https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/pom.xml[`plugins/site-example/pom.xml`]. +https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build.gradle[`/plugins/site-example/build.gradle`]. [float] ==== Mandatory elements for all plugins @@ -123,13 +123,13 @@ Read more in {ref}/integration-tests.html#changing-node-configuration[Changing N === Java Security permissions Some plugins may need additional security permissions. A plugin can include -the optional `plugin-security.policy` file containing `grant` statements for -additional permissions. Any additional permissions will be displayed to the user -with a large warning, and they will have to confirm them when installing the +the optional `plugin-security.policy` file containing `grant` statements for +additional permissions. Any additional permissions will be displayed to the user +with a large warning, and they will have to confirm them when installing the plugin interactively. So if possible, it is best to avoid requesting any spurious permissions! -If you are using the elasticsearch Maven build system, place this file in +If you are using the elasticsearch Gradle build system, place this file in `src/main/plugin-metadata` and it will be applied during unit tests as well. 
Keep in mind that the Java security model is stack-based, and the additional From 4426ed0a097e1a15df494e61087624540bde94a6 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 15 Dec 2015 14:51:42 +0200 Subject: [PATCH 039/322] [DOCS] Link docs on repository-hdfs plugin relates #15191 --- docs/plugins/repository-hdfs.asciidoc | 7 ++++--- docs/plugins/repository.asciidoc | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index ea13e5ad3a6..6a90859c0ef 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -8,7 +8,7 @@ The HDFS repository plugin adds support for using HDFS File System as a reposito [float] ==== Installation -This plugin can be installed using the plugin manager: +This plugin can be installed using the plugin manager using _one_ of the following packages: [source,sh] ---------------------------------------------------------------- @@ -17,14 +17,14 @@ sudo bin/plugin install repository-hdfs-hadoop2 sudo bin/plugin install repository-hdfs-lite ---------------------------------------------------------------- -The plugin must be installed on every node in the cluster, and each node must +The chosen plugin must be installed on every node in the cluster, and each node must be restarted after installation. [[repository-hdfs-remove]] [float] ==== Removal -The plugin can be removed with the following command: +The plugin can be removed by specifying the _installed_ package using _one_ of the following commands: [source,sh] ---------------------------------------------------------------- @@ -76,6 +76,7 @@ additional permissions which are not provided by the plugin. In this case there are several workarounds: * add the permission into `plugin-security.policy` (available in the plugin folder) + * disable the security manager through `es.security.manager.enabled=false` configurations setting - NOT RECOMMENDED If you find yourself in such a situation, please let us know what Hadoop distro version and OS you are using and what permission is missing diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 554fa34b033..5706fc74c12 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -18,10 +18,9 @@ The S3 repository plugin adds support for using S3 as a repository. The Azure repository plugin adds support for using Azure as a repository. -https://github.com/elastic/elasticsearch-hadoop/tree/master/repository-hdfs[Hadoop HDFS Repository]:: -The Hadoop HDFS Repository plugin adds support for using an HDFS file system -as a repository. +<>:: +The Hadoop HDFS Repository plugin adds support for using HDFS as a repository. [float] @@ -40,3 +39,5 @@ include::repository-azure.asciidoc[] include::repository-s3.asciidoc[] +include::repository-hdfs.asciidoc[] + From d94bba2d9c8a2a717daaf79058c4b16e398cb3eb Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 15 Dec 2015 14:55:23 +0100 Subject: [PATCH 040/322] Remove back compat for the `path` option. The `path` option allowed indexing/storing a field `a.b.c` under just `c` when set to `just_name`. This "feature" has been removed in 2.0 in favor of `copy_to`, so we can remove the back compat in 3.x.
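To recap the behaviour being dropped, this is the ContentPath logic the diff below removes: with `path` set to `just_name`, a field was addressed by its leaf name only.

    public String pathAsText(String name) {
        if (pathType == Type.JUST_NAME) {
            return name;                 // "a.b.c" indexed under just "c"
        }
        return fullPathAsText(name);     // default: the full path "a.b.c"
    }

After this change ContentPath always builds the full path, and `copy_to` is the supported way to index a value under an additional name.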
--- .../index/mapper/ContentPath.java | 29 ++----------- .../index/mapper/DocumentParser.java | 14 +++---- .../index/mapper/FieldMapper.java | 40 +++--------------- .../index/mapper/core/TypeParsers.java | 31 ++------------ .../mapper/geo/BaseGeoPointFieldMapper.java | 28 ++----------- .../index/mapper/geo/GeoPointFieldMapper.java | 9 ++-- .../mapper/geo/GeoPointFieldMapperLegacy.java | 9 ++-- .../index/mapper/object/DynamicTemplate.java | 4 +- .../index/mapper/object/ObjectMapper.java | 41 +++---------------- .../index/mapper/object/RootObjectMapper.java | 8 ++-- .../mapper/externalvalues/ExternalMapper.java | 4 -- .../mapper/attachments/AttachmentMapper.java | 28 ++----------- 12 files changed, 42 insertions(+), 203 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java b/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java index 47c43720162..54c6ef20e3e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java @@ -19,16 +19,9 @@ package org.elasticsearch.index.mapper; -public class ContentPath { +public final class ContentPath { - public enum Type { - JUST_NAME, - FULL, - } - - private Type pathType; - - private final char delimiter; + private static final char DELIMITER = '.'; private final StringBuilder sb; @@ -47,7 +40,6 @@ public class ContentPath { * number of path elements to not be included in {@link #pathAsText(String)}. */ public ContentPath(int offset) { - this.delimiter = '.'; this.sb = new StringBuilder(); this.offset = offset; reset(); @@ -71,26 +63,11 @@ public class ContentPath { } public String pathAsText(String name) { - if (pathType == Type.JUST_NAME) { - return name; - } - return fullPathAsText(name); - } - - public String fullPathAsText(String name) { sb.setLength(0); for (int i = offset; i < index; i++) { - sb.append(path[i]).append(delimiter); + sb.append(path[i]).append(DELIMITER); } sb.append(name); return sb.toString(); } - - public Type pathType() { - return pathType; - } - - public void pathType(Type type) { - this.pathType = type; - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 656ee2c600d..bb1749d2336 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -234,9 +234,6 @@ class DocumentParser implements Closeable { nestedDoc.add(new Field(TypeFieldMapper.NAME, mapper.nestedTypePathAsString(), TypeFieldMapper.Defaults.FIELD_TYPE)); } - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(mapper.pathType()); - // if we are at the end of the previous object, advance if (token == XContentParser.Token.END_OBJECT) { token = parser.nextToken(); @@ -272,7 +269,6 @@ class DocumentParser implements Closeable { } } // restore the enable path flag - context.path().pathType(origPathType); if (nested.isNested()) { ParseContext.Document nestedDoc = context.doc(); ParseContext.Document parentDoc = nestedDoc.getParent(); @@ -341,7 +337,7 @@ class DocumentParser implements Closeable { context.path().remove(); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object"); if (builder == null) { - builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(mapper.pathType()); + builder = 
MapperBuilders.object(currentFieldName).enabled(true); // if this is a non root object, then explicitly set the dynamic behavior if set if (!(mapper instanceof RootObjectMapper) && mapper.dynamic() != ObjectMapper.Defaults.DYNAMIC) { ((ObjectMapper.Builder) builder).dynamic(mapper.dynamic()); @@ -610,7 +606,7 @@ class DocumentParser implements Closeable { return null; } final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); - final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().fullPathAsText(currentFieldName)); + final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().pathAsText(currentFieldName)); Mapper.Builder builder = null; if (existingFieldType != null) { // create a builder of the same type @@ -695,7 +691,7 @@ class DocumentParser implements Closeable { if (paths.length > 1) { ObjectMapper parent = context.root(); for (int i = 0; i < paths.length-1; i++) { - mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i])); + mapper = context.docMapper().objectMappers().get(context.path().pathAsText(paths[i])); if (mapper == null) { // One mapping is missing, check if we are allowed to create a dynamic one. ObjectMapper.Dynamic dynamic = parent.dynamic(); @@ -713,12 +709,12 @@ class DocumentParser implements Closeable { if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) { ((ObjectMapper.Builder) builder).dynamic(parent.dynamic()); } - builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType()); + builder = MapperBuilders.object(paths[i]).enabled(true); } Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); mapper = (ObjectMapper) builder.build(builderContext); if (mapper.nested() != ObjectMapper.Nested.NO) { - throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`"); + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`"); } break; case FALSE: diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9997f8608d2..6f447cdeb86 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -207,11 +207,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return this; } - public T multiFieldPathType(ContentPath.Type pathType) { - multiFieldsBuilder.pathType(pathType); - return builder; - } - public T addMultiField(Mapper.Builder mapperBuilder) { multiFieldsBuilder.add(mapperBuilder); return builder; @@ -242,7 +237,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } protected String buildFullName(BuilderContext context) { - return context.path().fullPathAsText(name); + return context.path().pathAsText(name); } protected void setupFieldType(BuilderContext context) { @@ -540,18 +535,12 @@ public abstract class FieldMapper extends Mapper implements Cloneable { public static class MultiFields { public static MultiFields empty() { - return new MultiFields(ContentPath.Type.FULL, ImmutableOpenMap.of()); + return new MultiFields(ImmutableOpenMap.of()); } public static class Builder { private 
final ImmutableOpenMap.Builder mapperBuilders = ImmutableOpenMap.builder(); - private ContentPath.Type pathType = ContentPath.Type.FULL; - - public Builder pathType(ContentPath.Type pathType) { - this.pathType = pathType; - return this; - } public Builder add(Mapper.Builder builder) { mapperBuilders.put(builder.name(), builder); @@ -560,13 +549,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable { @SuppressWarnings("unchecked") public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) { - if (pathType == ContentPath.Type.FULL && mapperBuilders.isEmpty()) { + if (mapperBuilders.isEmpty()) { return empty(); - } else if (mapperBuilders.isEmpty()) { - return new MultiFields(pathType, ImmutableOpenMap.of()); } else { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(mainFieldBuilder.name()); ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders; for (ObjectObjectCursor cursor : this.mapperBuilders) { @@ -577,18 +562,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable { mapperBuilders.put(key, mapper); } context.path().remove(); - context.path().pathType(origPathType); ImmutableOpenMap.Builder mappers = mapperBuilders.cast(); - return new MultiFields(pathType, mappers.build()); + return new MultiFields(mappers.build()); } } } - private final ContentPath.Type pathType; private final ImmutableOpenMap mappers; - private MultiFields(ContentPath.Type pathType, ImmutableOpenMap mappers) { - this.pathType = pathType; + private MultiFields(ImmutableOpenMap mappers) { ImmutableOpenMap.Builder builder = new ImmutableOpenMap.Builder<>(); // we disable the all in multi-field mappers for (ObjectObjectCursor cursor : mappers) { @@ -609,21 +591,14 @@ public abstract class FieldMapper extends Mapper implements Cloneable { context = context.createMultiFieldContext(); - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); - context.path().add(mainField.simpleName()); for (ObjectCursor cursor : mappers.values()) { cursor.value.parse(context); } context.path().remove(); - context.path().pathType(origPathType); } public MultiFields merge(MultiFields mergeWith) { - if (pathType != mergeWith.pathType) { - throw new IllegalArgumentException("Can't change path type from [" + pathType + "] to [" + mergeWith.pathType + "]"); - } ImmutableOpenMap.Builder newMappersBuilder = ImmutableOpenMap.builder(mappers); for (ObjectCursor cursor : mergeWith.mappers.values()) { @@ -642,7 +617,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } ImmutableOpenMap mappers = newMappersBuilder.build(); - return new MultiFields(pathType, mappers); + return new MultiFields(mappers); } public Iterator iterator() { @@ -650,9 +625,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (pathType != ContentPath.Type.FULL) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (!mappers.isEmpty()) { // sort the mappers so we get consistent serialization format Mapper[] sortedMappers = mappers.values().toArray(Mapper.class); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index e530243657c..f6bd4946eb2 100644 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -61,7 +61,6 @@ public class TypeParsers { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - ContentPath.Type pathType = null; FieldMapper.Builder mainFieldBuilder = null; List fields = null; String firstType = null; @@ -70,10 +69,7 @@ public class TypeParsers { Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); - if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - pathType = parsePathType(name, fieldNode.toString()); - iterator.remove(); - } else if (fieldName.equals("fields")) { + if (fieldName.equals("fields")) { Map fieldsNode = (Map) fieldNode; for (Iterator> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) { Map.Entry entry1 = fieldsIterator.next(); @@ -132,17 +128,10 @@ public class TypeParsers { } } - if (fields != null && pathType != null) { + if (fields != null) { for (Mapper.Builder field : fields) { mainFieldBuilder.addMultiField(field); } - mainFieldBuilder.multiFieldPathType(pathType); - } else if (fields != null) { - for (Mapper.Builder field : fields) { - mainFieldBuilder.addMultiField(field); - } - } else if (pathType != null) { - mainFieldBuilder.multiFieldPathType(pathType); } return mainFieldBuilder; } @@ -337,10 +326,7 @@ public class TypeParsers { public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { parserContext = parserContext.createMultiFieldContext(parserContext); - if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.multiFieldPathType(parsePathType(name, propNode.toString())); - return true; - } else if (propName.equals("fields")) { + if (propName.equals("fields")) { final Map multiFieldsPropNodes; @@ -457,17 +443,6 @@ public class TypeParsers { } } - public static ContentPath.Type parsePathType(String name, String path) throws MapperParsingException { - path = Strings.toUnderscoreCase(path); - if ("just_name".equals(path)) { - return ContentPath.Type.JUST_NAME; - } else if ("full".equals(path)) { - return ContentPath.Type.FULL; - } else { - throw new MapperParsingException("wrong value for pathType [" + path + "] for object [" + name + "]"); - } - } - @SuppressWarnings("unchecked") public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) { FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 2b1d091be3b..f7910e1f5af 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; import 
org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; @@ -73,7 +72,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public static class Defaults { - public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; public static final boolean ENABLE_LATLON = false; public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; @@ -82,7 +80,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public abstract static class Builder extends FieldMapper.Builder { - protected ContentPath.Type pathType = Defaults.PATH_TYPE; protected boolean enableLatLon = Defaults.ENABLE_LATLON; @@ -105,12 +102,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return (GeoPointFieldType)fieldType; } - @Override - public T multiFieldPathType(ContentPath.Type pathType) { - this.pathType = pathType; - return builder; - } - @Override public T fieldDataSettings(Settings settings) { this.fieldDataSettings = settings; @@ -158,13 +149,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); - GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; DoubleFieldMapper latMapper = null; @@ -190,9 +178,8 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix); } context.path().remove(); - context.path().pathType(origPathType); - return build(context, name, fieldType, defaultFieldType, context.indexSettings(), origPathType, + return build(context, name, fieldType, defaultFieldType, context.indexSettings(), latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); } } @@ -364,17 +351,14 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected final DoubleFieldMapper lonMapper; - protected final ContentPath.Type pathType; - protected final StringFieldMapper geoHashMapper; protected Explicit ignoreMalformed; protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.pathType = pathType; this.latMapper = latMapper; this.lonMapper = lonMapper; this.geoHashMapper = geoHashMapper; @@ -434,8 +418,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override public Mapper parse(ParseContext context) throws IOException { - ContentPath.Type 
origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(simpleName()); GeoPoint sparse = context.parseExternalValue(GeoPoint.class); @@ -480,7 +462,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } context.path().remove(); - context.path().pathType(origPathType); return null; } @@ -505,9 +486,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || pathType != Defaults.PATH_TYPE) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { builder.field("lat_lon", fieldType().isLatLonEnabled()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 286aca29727..fa61669e800 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -81,12 +80,12 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { @Override public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, + MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); - return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, + return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); } @@ -104,9 +103,9 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { } public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields, + super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 
7e5a8738384..735baa88533 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -110,14 +109,14 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement @Override public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, + MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); fieldType.setHasDocValues(false); defaultFieldType.setHasDocValues(false); - return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, + return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo); } @@ -287,10 +286,10 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement protected Explicit coerce; public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields, + super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); this.coerce = coerce; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java b/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java index c51264f3dba..58602f06dfa 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java @@ -125,13 +125,13 @@ public class DynamicTemplate { } public boolean match(ContentPath path, String name, String dynamicType) { - if (pathMatch != null && !patternMatch(pathMatch, path.fullPathAsText(name))) { + if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) { return false; } if (match != null && !patternMatch(match, name)) { return false; } - if (pathUnmatch != null && patternMatch(pathUnmatch, path.fullPathAsText(name))) { + if (pathUnmatch != null && patternMatch(pathUnmatch, path.pathAsText(name))) { return false; } if (unmatch != null && patternMatch(unmatch, name)) { diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index cbbc8563576..c2d9783fc9f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.CopyOnWriteHashMap; @@ -40,7 +39,6 @@ import java.util.*; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.object; -import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType; /** * @@ -54,7 +52,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, public static final boolean ENABLED = true; public static final Nested NESTED = Nested.NO; public static final Dynamic DYNAMIC = null; // not set, inherited from root - public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; } public static enum Dynamic { @@ -104,8 +101,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, protected Dynamic dynamic = Defaults.DYNAMIC; - protected ContentPath.Type pathType = Defaults.PATH_TYPE; - protected Boolean includeInAll; protected final List mappersBuilders = new ArrayList<>(); @@ -130,11 +125,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return builder; } - public T pathType(ContentPath.Type pathType) { - this.pathType = pathType; - return builder; - } - public T includeInAll(boolean includeInAll) { this.includeInAll = includeInAll; return builder; @@ -147,8 +137,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, @Override public Y build(BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(name); Map mappers = new HashMap<>(); @@ -156,17 +144,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, Mapper mapper = builder.build(context); mappers.put(mapper.simpleName(), mapper); } - context.path().pathType(origPathType); context.path().remove(); - ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers, context.indexSettings()); + ObjectMapper objectMapper = createMapper(name, context.path().pathAsText(name), enabled, nested, dynamic, mappers, context.indexSettings()); objectMapper = objectMapper.includeInAllIfNotSet(includeInAll); return (Y) objectMapper; } - protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers, @Nullable Settings settings) { - return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers); + protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers, @Nullable Settings settings) { + return new ObjectMapper(name, fullPath, enabled, nested, dynamic, mappers); } } @@ -179,7 +166,7 @@ public class ObjectMapper extends Mapper implements 
AllFieldMapper.IncludeInAll, Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); - if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder) || parseObjectProperties(name, fieldName, fieldNode, parserContext, builder)) { + if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)) { iterator.remove(); } } @@ -214,14 +201,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return false; } - protected static boolean parseObjectProperties(String name, String fieldName, Object fieldNode, ParserContext parserContext, ObjectMapper.Builder builder) { - if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.pathType(parsePathType(name, fieldNode.toString())); - return true; - } - return false; - } - protected static void parseNested(String name, Map node, ObjectMapper.Builder builder) { boolean nested = false; boolean nestedIncludeInParent = false; @@ -326,19 +305,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, private volatile Dynamic dynamic; - private final ContentPath.Type pathType; - private Boolean includeInAll; private volatile CopyOnWriteHashMap mappers; - ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers) { + ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers) { super(name); this.fullPath = fullPath; this.enabled = enabled; this.nested = nested; this.dynamic = dynamic; - this.pathType = pathType; if (mappers == null) { this.mappers = new CopyOnWriteHashMap<>(); } else { @@ -380,10 +356,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return this.enabled; } - public ContentPath.Type pathType() { - return pathType; - } - public Mapper getMapper(String field) { return mappers.get(field); } @@ -535,9 +507,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, if (enabled != Defaults.ENABLED) { builder.field("enabled", enabled); } - if (pathType != Defaults.PATH_TYPE) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index c6c64e432b7..2fd4e914718 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -95,7 +95,7 @@ public class RootObjectMapper extends ObjectMapper { @Override - protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers, @Nullable Settings settings) { + protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers, @Nullable Settings settings) { assert !nested.isNested(); FormatDateTimeFormatter[] dates = null; if (dynamicDateTimeFormatters == null) { @@ -106,7 +106,7 @@ public class RootObjectMapper extends ObjectMapper { } else { dates = dynamicDateTimeFormatters.toArray(new FormatDateTimeFormatter[dynamicDateTimeFormatters.size()]); } - return new 
RootObjectMapper(name, enabled, dynamic, pathType, mappers, + return new RootObjectMapper(name, enabled, dynamic, mappers, dates, dynamicTemplates.toArray(new DynamicTemplate[dynamicTemplates.size()]), dateDetection, numericDetection); @@ -196,9 +196,9 @@ public class RootObjectMapper extends ObjectMapper { private volatile DynamicTemplate dynamicTemplates[]; - RootObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map mappers, + RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map mappers, FormatDateTimeFormatter[] dynamicDateTimeFormatters, DynamicTemplate dynamicTemplates[], boolean dateDetection, boolean numericDetection) { - super(name, name, enabled, Nested.NO, dynamic, pathType, mappers); + super(name, name, enabled, Nested.NO, dynamic, mappers); this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; this.dateDetection = dateDetection; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index d07bf21a4be..e7b7c26b84f 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -95,9 +95,6 @@ public class ExternalMapper extends FieldMapper { @Override public ExternalMapper build(BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(ContentPath.Type.FULL); - context.path().add(name); BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); @@ -107,7 +104,6 @@ public class ExternalMapper extends FieldMapper { FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); - context.path().pathType(origPathType); setupFieldType(context); return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 0ba636db72b..66ecbe0850b 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -37,7 +37,6 @@ import java.util.*; import static org.elasticsearch.index.mapper.MapperBuilders.*; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; -import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType; /** *

@@ -65,7 +64,6 @@ public class AttachmentMapper extends FieldMapper {
     public static final String CONTENT_TYPE = "attachment";
 
     public static class Defaults {
-        public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
 
         public static final AttachmentFieldType FIELD_TYPE = new AttachmentFieldType();
         static {
@@ -108,8 +106,6 @@ public class AttachmentMapper extends FieldMapper {
 
     public static class Builder extends FieldMapper.Builder {
 
-        private ContentPath.Type pathType = Defaults.PATH_TYPE;
-
         private Boolean ignoreErrors = null;
 
         private Integer defaultIndexedChars = null;
@@ -140,11 +136,6 @@ public class AttachmentMapper extends FieldMapper {
             this.contentBuilder = stringField(FieldNames.CONTENT);
         }
 
-        public Builder pathType(ContentPath.Type pathType) {
-            this.pathType = pathType;
-            return this;
-        }
-
         public Builder content(Mapper.Builder content) {
             this.contentBuilder = content;
             return this;
@@ -192,8 +183,6 @@ public class AttachmentMapper extends FieldMapper {
 
         @Override
         public AttachmentMapper build(BuilderContext context) {
-            ContentPath.Type origPathType = context.path().pathType();
-            context.path().pathType(pathType);
 
             FieldMapper contentMapper;
             if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
@@ -220,8 +209,6 @@ public class AttachmentMapper extends FieldMapper {
             FieldMapper language = (FieldMapper) languageBuilder.build(context);
             context.path().remove();
 
-            context.path().pathType(origPathType);
-
             if (defaultIndexedChars == null && context.indexSettings() != null) {
                 defaultIndexedChars = context.indexSettings().getAsInt("index.mapping.attachment.indexed_chars", 100000);
             }
@@ -257,7 +244,7 @@ public class AttachmentMapper extends FieldMapper {
 
             defaultFieldType.freeze();
             this.setupFieldType(context);
-            return new AttachmentMapper(name, fieldType, defaultFieldType, pathType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
+            return new AttachmentMapper(name, fieldType, defaultFieldType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
                     dateMapper, titleMapper, nameMapper, authorMapper, keywordsMapper, contentTypeMapper, contentLength,
                     language, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
@@ -309,10 +296,7 @@ public class AttachmentMapper extends FieldMapper {
                 Map.Entry entry = iterator.next();
                 String fieldName = entry.getKey();
                 Object fieldNode = entry.getValue();
-                if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                    builder.pathType(parsePathType(name, fieldNode.toString()));
-                    iterator.remove();
-                } else if (fieldName.equals("fields")) {
+                if (fieldName.equals("fields")) {
                     Map fieldsNode = (Map) fieldNode;
                     for (Iterator> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
                         Map.Entry entry1 = fieldsIterator.next();
@@ -375,8 +359,6 @@ public class AttachmentMapper extends FieldMapper {
         }
     }
 
-    private final ContentPath.Type pathType;
-
     private final int defaultIndexedChars;
 
     private final boolean ignoreErrors;
@@ -401,13 +383,12 @@ public class AttachmentMapper extends FieldMapper {
 
     private final FieldMapper languageMapper;
 
-    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, ContentPath.Type pathType, int defaultIndexedChars, Boolean ignoreErrors,
+    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, int defaultIndexedChars, Boolean ignoreErrors,
                             Boolean defaultLangDetect, FieldMapper contentMapper,
                             FieldMapper dateMapper, FieldMapper titleMapper, FieldMapper nameMapper, FieldMapper authorMapper,
                             FieldMapper keywordsMapper, FieldMapper contentTypeMapper, FieldMapper contentLengthMapper,
                             FieldMapper languageMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName, type, defaultFieldType, indexSettings, multiFields, copyTo);
-        this.pathType = pathType;
         this.defaultIndexedChars = defaultIndexedChars;
         this.ignoreErrors = ignoreErrors;
         this.defaultLangDetect = defaultLangDetect;
@@ -626,9 +607,6 @@ public class AttachmentMapper extends FieldMapper {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(simpleName());
         builder.field("type", CONTENT_TYPE);
-        if (indexCreatedBefore2x) {
-            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
-        }
 
         builder.startObject("fields");
         contentMapper.toXContent(builder, params);

From 0ce88b5887324b74969a87306a43780b732ee28f Mon Sep 17 00:00:00 2001
From: umeku 
Date: Tue, 15 Dec 2015 13:44:58 +0200
Subject: [PATCH 041/322] Fix inaccurate docs for nested datatype

Closes #15436
---
 docs/reference/mapping/types/nested.asciidoc | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc
index cca87853d83..b4bb06e236c 100644
--- a/docs/reference/mapping/types/nested.asciidoc
+++ b/docs/reference/mapping/types/nested.asciidoc
@@ -110,7 +110,7 @@ GET my_index/_search
         "bool": {
           "must": [
             { "match": { "user.first": "Alice" }},
-            { "match": { "user.last":  "White" }} <2>
+            { "match": { "user.last":  "Smith" }} <2>
           ]
         }
       }
@@ -127,7 +127,7 @@ GET my_index/_search
         "bool": {
           "must": [
             { "match": { "user.first": "Alice" }},
-            { "match": { "user.last":  "Smith" }} <3>
+            { "match": { "user.last":  "White" }} <3>
           ]
         }
       },
@@ -137,14 +137,14 @@ GET my_index/_search
             "user.first": {}
           }
         }
-      }
+
     }
   }
 }
 --------------------------------------------------
 // AUTOSENSE
 <1> The `user` field is mapped as type `nested` instead of type `object`.
-<2> This query doesn't match because `Alice` and `White` are not in the same nested object.
+<2> This query doesn't match because `Alice` and `Smith` are not in the same nested object.
 <3> This query matches because `Alice` and `White` are in the same nested object.
 <4> `inner_hits` allow us to highlight the matching nested documents.
 

From 07044e02b99a8453b099ca690337e19a771f232f Mon Sep 17 00:00:00 2001
From: Boaz Leskes 
Date: Tue, 15 Dec 2015 15:25:17 +0100
Subject: [PATCH 042/322] IndexService: remove unneed inject annotation from

---
 core/src/main/java/org/elasticsearch/index/IndexService.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index 92ca00231b5..829d85f3a96 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -93,7 +93,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
     private final AtomicBoolean deleted = new AtomicBoolean(false);
     private final IndexSettings indexSettings;
 
-    @Inject
     public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
                         SimilarityService similarityService,
                         ShardStoreDeleter shardStoreDeleter,

From 210657a453290f8eb0210822517bab86cb103592 Mon Sep 17 00:00:00 2001
From: Costin Leau 
Date: Tue, 15 Dec 2015 16:44:27 +0200
Subject: [PATCH 043/322] [DOC] escape # in programlisting

---
 docs/plugins/repository-hdfs.asciidoc | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc
index 6a90859c0ef..114dbf13035 100644
--- a/docs/plugins/repository-hdfs.asciidoc
+++ b/docs/plugins/repository-hdfs.asciidoc
@@ -88,24 +88,26 @@ by raising an issue. Thank you!
 Once installed, define the configuration for the `hdfs` repository through `elasticsearch.yml` or the
 {ref}/modules-snapshots.html[REST API]:
 
-[source]
+[source,yaml]
 ----
 repositories
   hdfs:
-    uri: "hdfs://:/"    # optional - Hadoop file-system URI
-    path: "some/path"               # required - path with the file-system where data is stored/loaded
-    load_defaults: "true"           # optional - whether to load the default Hadoop configuration (default) or not
-    conf_location: "extra-cfg.xml"  # optional - Hadoop configuration XML to be loaded (use commas for multi values)
-    conf. : ""          # optional - 'inlined' key=value added to the Hadoop configuration
-    concurrent_streams: 5           # optional - the number of concurrent streams (defaults to 5)
-    compress: "false"               # optional - whether to compress the metadata or not (default)
-    chunk_size: "10mb"              # optional - chunk size (disabled by default)
+    uri: "hdfs://:/"    \# optional - Hadoop file-system URI
+    path: "some/path"               \# required - path with the file-system where data is stored/loaded
+    load_defaults: "true"           \# optional - whether to load the default Hadoop configuration (default) or not
+    conf_location: "extra-cfg.xml"  \# optional - Hadoop configuration XML to be loaded (use commas for multi values)
+    conf. : ""          \# optional - 'inlined' key=value added to the Hadoop configuration
+    concurrent_streams: 5           \# optional - the number of concurrent streams (defaults to 5)
+    compress: "false"               \# optional - whether to compress the metadata or not (default)
+    chunk_size: "10mb"              \# optional - chunk size (disabled by default)
+    
 ----
 
 NOTE: Be careful when including a paths within the `uri` setting; Some implementations ignore them completely while
 others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead.
 
-===== Plugging other file-systems
+[[repository-hdfs-other-fs]]
+==== Plugging other file-systems
 
 Any HDFS-compatible file-systems (like Amazon `s3://` or Google `gs://`) can be used as long as the proper Hadoop
 configuration is passed to the Elasticsearch plugin. In practice, this means making sure the correct Hadoop configuration

From 4f445688dd59c01f61cfca64e3094e7bbd097cb4 Mon Sep 17 00:00:00 2001
From: Simon Willnauer 
Date: Tue, 15 Dec 2015 16:20:58 +0100
Subject: [PATCH 044/322] apply feedback from @bleskes

---
 .../ClusterRebalanceAllocationDecider.java    |  4 +--
 .../ConcurrentRebalanceAllocationDecider.java |  4 +--
 .../decider/DiskThresholdDecider.java         |  2 +-
 .../decider/ThrottlingAllocationDecider.java  |  5 +--
 .../service/InternalClusterService.java       |  4 +--
 .../settings/AbstractScopedSettings.java      |  1 +
 .../common/settings/ClusterSettings.java      |  5 ++-
 .../common/settings/Setting.java              | 36 +++++++++++++------
 .../HierarchyCircuitBreakerService.java       | 15 ++++----
 .../java/org/elasticsearch/node/Node.java     |  2 +-
 .../elasticsearch/threadpool/ThreadPool.java  |  2 +-
 .../common/settings/SettingTests.java         |  2 +-
 .../UpdateThreadPoolSettingsTests.java        | 12 +++----
 .../NettySizeHeaderFrameDecoderTests.java     |  2 +-
 14 files changed, 56 insertions(+), 40 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
index 0e5e744d274..b1be2a6fce4 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
@@ -79,7 +79,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider {
         }
     }
 
-    private ClusterRebalanceType type;
+    private volatile ClusterRebalanceType type;
 
     @Inject
     public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
@@ -95,7 +95,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider {
         clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType);
     }
 
-    public void setType(ClusterRebalanceType type) {
+    private void setType(ClusterRebalanceType type) {
         this.type = type;
     }
 
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java
index 21023400e32..504ea5da215 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java
@@ -42,7 +42,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider {
 
     public static final String NAME = "concurrent_rebalance";
 
-    public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, true, Setting.Scope.CLUSTER);
+    public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, 0, true, Setting.Scope.CLUSTER);
     private volatile int clusterConcurrentRebalance;
 
     @Inject
@@ -53,7 +53,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider {
         clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, this::setClusterConcurrentRebalance);
     }
 
-    public void setClusterConcurrentRebalance(int concurrentRebalance) {
+    private void setClusterConcurrentRebalance(int concurrentRebalance) {
         clusterConcurrentRebalance = concurrentRebalance;
     }
 
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
index 49cf5abadd4..400ed70f808 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
@@ -550,7 +550,7 @@ public class DiskThresholdDecider extends AllocationDecider {
 
     /**
      * Checks if a watermark string is a valid percentage or byte size value,
-     * returning true if valid, false if invalid.
+     * @return the watermark value given
      */
     public static String validWatermarkSetting(String watermark, String settingName) {
         try {
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java
index 9e3d96b4e18..b97e6138674 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.cluster.routing.allocation.decider;
 
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
@@ -52,8 +53,8 @@ public class ThrottlingAllocationDecider extends AllocationDecider {
     public static final String NAME = "throttling";
     public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries";
 
-    public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, true, Setting.Scope.CLUSTER);
-    public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), Integer::parseInt, true, Setting.Scope.CLUSTER);
+    public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, Setting.Scope.CLUSTER);
+    public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, Setting.Scope.CLUSTER);
 
     private volatile int primariesInitialRecoveries;
     private volatile int concurrentRecoveries;
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
index 09f15994848..402df7251db 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
@@ -64,7 +64,7 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF
 public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService {
 
     public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER);
-    public static final String SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL = "cluster.service.reconnect_interval";
+    public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.service.reconnect_interval",  TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER);
 
     public static final String UPDATE_THREAD_NAME = "clusterService#updateTask";
     private final ThreadPool threadPool;
@@ -123,7 +123,7 @@ public class InternalClusterService extends AbstractLifecycleComponent extends ToXContentToBytes {
         try {
             return parser.apply(value);
         } catch (ElasticsearchParseException ex) {
-            throw ex;
+            throw new IllegalArgumentException(ex.getMessage(), ex);
         } catch (Exception t) {
             throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", t);
         }
@@ -146,6 +144,7 @@ public class Setting extends ToXContentToBytes {
         builder.field("key", key);
         builder.field("type", scope.name());
         builder.field("dynamic", dynamic);
+        builder.field("is_group_setting", isGroupSetting());
         builder.field("default", defaultValue.apply(Settings.EMPTY));
         builder.endObject();
         return builder;
@@ -163,9 +162,9 @@ public class Setting extends ToXContentToBytes {
         return newUpdater(consumer, logger, (s) -> {});
     }
 
-    AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer accept) {
+    AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) {
         if (isDynamic()) {
-            return new Updater(consumer, logger, accept);
+            return new Updater(consumer, logger, validator);
         } else {
             throw new IllegalStateException("setting [" + getKey() + "] is not dynamic");
         }
@@ -222,6 +221,9 @@ public class Setting extends ToXContentToBytes {
         public boolean hasChanged(Settings current, Settings previous) {
             final String newValue = getRaw(current);
             final String value = getRaw(previous);
+            assert isGroupSetting() == false : "group settings must override this method";
+            assert value != null : "value was null but can't be unless default is null which is invalid";
+
             return value.equals(newValue) == false;
         }
 
@@ -258,14 +260,26 @@ public class Setting extends ToXContentToBytes {
         return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> {
             float value = Float.parseFloat(s);
             if (value < minValue) {
-                throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
             }
             return value;
         }, dynamic, scope);
     }
 
+    public static Setting intSetting(String key, int defaultValue, int minValue, boolean dynamic, Scope scope) {
+        return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope);
+    }
+
+    public static int parseInt(String s, int minValue, String key) {
+        int value = Integer.parseInt(s);
+        if (value < minValue) {
+            throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+        }
+        return value;
+    }
+
     public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) {
-        return new Setting<>(key, (s) -> Integer.toString(defaultValue), Integer::parseInt, dynamic, scope);
+        return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope);
     }
 
     public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) {
@@ -306,7 +320,7 @@ public class Setting extends ToXContentToBytes {
             }
 
             @Override
-            public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer accept) {
+            public AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) {
                 if (isDynamic() == false) {
                     throw new IllegalStateException("setting [" + getKey() + "] is not dynamic");
                 }
@@ -325,7 +339,7 @@ public class Setting extends ToXContentToBytes {
                         Settings currentSettings = get(current);
                         Settings previousSettings = get(previous);
                         try {
-                            accept.accept(currentSettings);
+                            validator.accept(currentSettings);
                         } catch (Exception | AssertionError e) {
                             throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + previousSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e);
                         }
@@ -350,7 +364,7 @@ public class Setting extends ToXContentToBytes {
         return new Setting<>(key, defaultValue, (s) -> {
             TimeValue timeValue = TimeValue.parseTimeValue(s, null, key);
             if (timeValue.millis() < minValue.millis()) {
-                throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
             }
             return timeValue;
         }, dynamic, scope);
@@ -368,7 +382,7 @@ public class Setting extends ToXContentToBytes {
         return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> {
             final double d = Double.parseDouble(s);
             if (d < minValue) {
-                throw new ElasticsearchParseException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
             }
             return d;
         }, dynamic, scope);
diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
index 4c2a2ced09e..0e1532bc6b3 100644
--- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
+++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
@@ -50,13 +50,13 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
 
     public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.CLUSTER);
     public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.CLUSTER);
-    public static final String FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.fielddata.type";
+    public static final Setting FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING =  new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER);
 
     public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.CLUSTER);
     public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.CLUSTER);
-    public static final String REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.request.type";
+    public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER);
+
 
-    public static final String DEFAULT_BREAKER_TYPE = "memory";
 
     private volatile BreakerSettings parentSettings;
     private volatile BreakerSettings fielddataSettings;
@@ -71,13 +71,13 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
         this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA,
                 FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(),
                 FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
-                CircuitBreaker.Type.parseValue(settings.get(FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE))
+                FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
         );
 
         this.requestSettings = new BreakerSettings(CircuitBreaker.REQUEST,
                 REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(),
                 REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
-                CircuitBreaker.Type.parseValue(settings.get(REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE))
+                REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
         );
 
         this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), 1.0, CircuitBreaker.Type.PARENT);
@@ -93,10 +93,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
         clusterSettings.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setRequestBreakerLimit);
     }
     private void setRequestBreakerLimit(ByteSizeValue newRequestMax, Double newRequestOverhead) {
-        long newRequestLimitBytes = newRequestMax == null ? HierarchyCircuitBreakerService.this.requestSettings.getLimit() : newRequestMax.bytes();
-        newRequestOverhead = newRequestOverhead == null ? HierarchyCircuitBreakerService.this.requestSettings.getOverhead() : newRequestOverhead;
-
-        BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestLimitBytes, newRequestOverhead,
+        BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestMax.bytes(), newRequestOverhead,
                 HierarchyCircuitBreakerService.this.requestSettings.getType());
         registerBreaker(newRequestSettings);
         HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings;
diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
index 3460539f5b8..a28e532d399 100644
--- a/core/src/main/java/org/elasticsearch/node/Node.java
+++ b/core/src/main/java/org/elasticsearch/node/Node.java
@@ -202,7 +202,7 @@ public class Node implements Releasable {
             injector = modules.createInjector();
 
             client = injector.getInstance(Client.class);
-            threadPool.setNodeSettingsService(injector.getInstance(ClusterSettings.class));
+            threadPool.setClusterSettings(injector.getInstance(ClusterSettings.class));
             success = true;
         } catch (IOException ex) {
             throw new ElasticsearchException("failed to bind service", ex);
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 935fa2231cb..56e02926ed6 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -250,7 +250,7 @@ public class ThreadPool extends AbstractComponent {
         this.estimatedTimeThread.start();
     }
 
-    public void setNodeSettingsService(ClusterSettings clusterSettings) {
+    public void setClusterSettings(ClusterSettings clusterSettings) {
         if(settingsListenerIsSet.compareAndSet(false, true)) {
             clusterSettings.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> validate(s.getAsGroups()));
         } else {
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index d8ac616eccb..1640cfdd3b5 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -47,7 +47,7 @@ public class SettingTests extends ESTestCase {
         try {
             settingUpdater.apply(Settings.builder().put("a.byte.size", 12).build(), Settings.EMPTY);
             fail("no unit");
-        } catch (ElasticsearchParseException ex) {
+        } catch (IllegalArgumentException ex) {
             assertEquals("failed to parse setting [a.byte.size] with value [12] as a size in bytes: unit is missing or unrecognized", ex.getMessage());
         }
 
diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
index 07f34071442..56b2a03bad1 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
@@ -92,7 +92,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
         try {
             threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build());
             ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
 
             clusterSettings.applySettings(
                     settingsBuilder()
@@ -118,7 +118,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
                     .put("name", "testCachedExecutorType").build();
             threadPool = new ThreadPool(nodeSettings);
             ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
 
             assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED);
             assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
@@ -169,7 +169,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
                     .put("name", "testFixedExecutorType").build();
             threadPool = new ThreadPool(nodeSettings);
             ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
             assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class));
             Settings settings = clusterSettings.applySettings(settingsBuilder()
                     .put("threadpool." + threadPoolName + ".size", "15")
@@ -224,7 +224,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
                     .put("name", "testScalingExecutorType").build();
             threadPool = new ThreadPool(nodeSettings);
             ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
             assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1));
             assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10));
             assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L));
@@ -262,7 +262,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
                     .put("name", "testCachedExecutorType").build();
             threadPool = new ThreadPool(nodeSettings);
             ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
             assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L);
 
             final CountDownLatch latch = new CountDownLatch(1);
@@ -299,7 +299,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
                     .put("name", "testCustomThreadPool").build();
             threadPool = new ThreadPool(nodeSettings);
             ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-            threadPool.setNodeSettingsService(clusterSettings);
+            threadPool.setClusterSettings(clusterSettings);
             ThreadPoolInfo groups = threadPool.info();
             boolean foundPool1 = false;
             boolean foundPool2 = false;
diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
index 02fffda722a..7a3fd88f93b 100644
--- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
@@ -64,7 +64,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
     @Before
     public void startThreadPool() {
         threadPool = new ThreadPool(settings);
-        threadPool.setNodeSettingsService(new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
+        threadPool.setClusterSettings(new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         NetworkService networkService = new NetworkService(settings);
         BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(settings, threadPool), new NoneCircuitBreakerService());
         nettyTransport = new NettyTransport(settings, threadPool, networkService, bigArrays, Version.CURRENT, new NamedWriteableRegistry());

From dbd7b5abbc2584811d5cc011b1f7052c86ba7f5b Mon Sep 17 00:00:00 2001
From: Simon Willnauer 
Date: Tue, 15 Dec 2015 16:39:48 +0100
Subject: [PATCH 045/322] fix several tests

---
 .../org/elasticsearch/common/settings/ClusterSettings.java    | 4 ----
 .../main/java/org/elasticsearch/common/settings/Setting.java  | 2 ++
 .../org/elasticsearch/cluster/settings/ClusterSettingsIT.java | 4 ++--
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
index 89cdd045170..4e922ca28e3 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
@@ -96,10 +96,6 @@ public final class ClusterSettings extends AbstractScopedSettings {
         IndicesTTLService.INDICES_TTL_INTERVAL_SETTING,
         MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING,
         MetaData.SETTING_READ_ONLY_SETTING,
-        RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE_SETTING,
-        RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS_SETTING,
-        RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE_SETTING,
-        RecoverySettings.INDICES_RECOVERY_COMPRESS_SETTING,
         RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING,
         RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING,
         RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING,
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index b15634b35e3..ba9573e0bbf 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -116,6 +116,8 @@ public class Setting extends ToXContentToBytes {
             return parser.apply(value);
         } catch (ElasticsearchParseException ex) {
             throw new IllegalArgumentException(ex.getMessage(), ex);
+        } catch (IllegalArgumentException ex) {
+            throw ex;
         } catch (Exception t) {
             throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", t);
         }
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 3a028fe54b0..5dccbb546ce 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -256,7 +256,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
                     .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "whatever").build())
                     .get();
             fail("bogus value");
-        } catch (ElasticsearchParseException ex) {
+        } catch (IllegalArgumentException ex) {
             assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.commit_timeout] with value [whatever] as a time value: unit is missing or unrecognized");
         }
 
@@ -268,7 +268,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
                     .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), -1).build())
                     .get();
             fail("bogus value");
-        } catch (ElasticsearchParseException ex) {
+        } catch (IllegalArgumentException ex) {
             assertEquals(ex.getMessage(), "Failed to parse value [-1] for setting [discovery.zen.publish_timeout] must be >= 0s");
         }
 

From 91b55ded6ace919af6f372fa93d5930d7a14b58b Mon Sep 17 00:00:00 2001
From: Lee Hinman 
Date: Tue, 15 Dec 2015 08:48:43 -0700
Subject: [PATCH 046/322] Trace log REST test headers

---
 .../elasticsearch/test/rest/client/http/HttpRequestBuilder.java  | 1 +
 1 file changed, 1 insertion(+)

diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
index 34665efa0f1..f6ce416dbff 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
+++ b/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
@@ -168,6 +168,7 @@ public class HttpRequestBuilder {
             logger.trace("sending request \n{}", stringBuilder.toString());
         }
         for (Map.Entry entry : this.headers.entrySet()) {
+            logger.trace("adding header [{} => {}]", entry.getKey(), entry.getValue());
             httpUriRequest.addHeader(entry.getKey(), entry.getValue());
         }
         try (CloseableHttpResponse closeableHttpResponse = httpClient.execute(httpUriRequest)) {

From 6a7fc8fb321de87dbf0efda8ba5d50c8ad206471 Mon Sep 17 00:00:00 2001
From: Boaz Leskes 
Date: Tue, 15 Dec 2015 17:10:32 +0100
Subject: [PATCH 047/322] IndexService: format and remove unneeded import

---
 .../org/elasticsearch/index/IndexService.java | 34 ++++++++++++-------
 1 file changed, 22 insertions(+), 12 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index 829d85f3a96..a6b66742c55 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -29,7 +29,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -48,7 +47,13 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexEventListener;
+import org.elasticsearch.index.shard.IndexSearcherWrapper;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.ShadowIndexShard;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardNotFoundException;
+import org.elasticsearch.index.shard.ShardPath;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.Store;
@@ -73,7 +78,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 /**
  *
  */
-public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard>{
+public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
 
     private final IndexEventListener eventListener;
     private final AnalysisService analysisService;
@@ -145,7 +150,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
      */
     @Nullable
     public IndexShard getShardOrNull(int shardId) {
-         return shards.get(shardId);
+        return shards.get(shardId);
     }
 
     /**
@@ -159,13 +164,17 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
         return indexShard;
     }
 
-    public Set<Integer> shardIds() { return shards.keySet(); }
+    public Set<Integer> shardIds() {
+        return shards.keySet();
+    }
 
     public IndexCache cache() {
         return indexCache;
     }
 
-    public IndexFieldDataService fieldData() { return indexFieldData; }
+    public IndexFieldDataService fieldData() {
+        return indexFieldData;
+    }
 
     public AnalysisService analysisService() {
         return this.analysisService;
@@ -206,7 +215,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
     private long getAvgShardSizeInBytes() throws IOException {
         long sum = 0;
         int count = 0;
-        for(IndexShard indexShard : this) {
+        for (IndexShard indexShard : this) {
             sum += indexShard.store().stats().sizeInBytes();
             count++;
         }
@@ -253,17 +262,17 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
                 // TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard
                 // that's being relocated/replicated we know how large it will become once it's done copying:
                 // Count up how many shards are currently on each data path:
-                Map<Path, Integer> dataPathToShardCount = new HashMap<>();
-                for(IndexShard shard : this) {
+                Map<Path, Integer> dataPathToShardCount = new HashMap<>();
+                for (IndexShard shard : this) {
                     Path dataPath = shard.shardPath().getRootStatePath();
                     Integer curCount = dataPathToShardCount.get(dataPath);
                     if (curCount == null) {
                         curCount = 0;
                     }
-                    dataPathToShardCount.put(dataPath, curCount+1);
+                    dataPathToShardCount.put(dataPath, curCount + 1);
                 }
                 path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
-                                                       dataPathToShardCount);
+                    dataPathToShardCount);
                 logger.debug("{} creating using a new path [{}]", shardId, path);
             } else {
                 logger.debug("{} creating using an existing path [{}]", shardId, path);
@@ -276,7 +285,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
             logger.debug("creating shard_id {}", shardId);
             // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
             final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
-                    (primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
+                (primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
             store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> nodeServicesProvider.getIndicesQueryCache().onClose(shardId)));
             if (useShadowEngine(primary, indexSettings)) {
                 indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider);
@@ -461,6 +470,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
             }
         }
     }
+
     /**
      * Returns the filter associated with listed filtering aliases.
      * 

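Aside from the import and brace cleanups, the hunks above touch the tally of
shards per data path that feeds ShardPath.selectNewPathForShard. The
get()/null-check/put() counting idiom they reformat can also be expressed with
Map.merge; a standalone illustrative sketch (not part of the patch):

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    final class ShardCountSketch {
        public static void main(String[] args) {
            Map<Path, Integer> dataPathToShardCount = new HashMap<>();
            Path[] shardRootPaths = { Paths.get("/data/a"), Paths.get("/data/b"), Paths.get("/data/a") };
            for (Path dataPath : shardRootPaths) {
                // merge() folds "get, check for null, put back" into a single call
                dataPathToShardCount.merge(dataPath, 1, Integer::sum);
            }
            System.out.println(dataPathToShardCount); // prints {/data/a=2, /data/b=1} (order may vary)
        }
    }
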
From 82a64fd2f8c7e1ddc1b1643c826b323a38316f6d Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 15 Dec 2015 17:21:31 +0100 Subject: [PATCH 048/322] Fix SearchWhileRelocatingIT so that the type of the mapping matches the type of documents. --- .../main/java/org/elasticsearch/index/mapper/MapperService.java | 2 +- .../org/elasticsearch/search/basic/SearchWhileRelocatingIT.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 86610283d75..de35b4712ea 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -338,7 +338,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { for (FieldMapper fieldMapper : fieldMappers) { if (fullPathObjectMappers.containsKey(fieldMapper.name())) { - throw new IllegalArgumentException("Field [{}] is defined as a field in mapping [" + fieldMapper.name() + "] but this name is already used for an object in other types"); + throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index 69c4bbdbd11..1f421292371 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -61,7 +61,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { final int numShards = between(1, 20); client().admin().indices().prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_shards", numShards).put("index.number_of_replicas", numberOfReplicas)) - .addMapping("type1", "loc", "type=geo_point", "test", "type=string").execute().actionGet(); + .addMapping("type", "loc", "type=geo_point", "test", "type=string").execute().actionGet(); ensureGreen(); List indexBuilders = new ArrayList<>(); final int numDocs = between(10, 20); From 015ead0c458e9fe455c148cef068090f840b8c9b Mon Sep 17 00:00:00 2001 From: soinlv Date: Tue, 15 Dec 2015 16:00:55 -0500 Subject: [PATCH 049/322] Remove unused import Closes #15458 --- .../java/org/elasticsearch/index/store/IndexStoreConfig.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 1bd023abdb0..6ce709c4f62 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -22,7 +22,6 @@ import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.node.settings.NodeSettingsService; From d24c83b6907c30f2bf79f153b3e4aa8c5819d654 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Dec 2015 17:38:30 -0500 Subject: [PATCH 050/322] Cleanup o/e/c/r/RoutingNodes.java This commit applies a few trivial cleanups to 
o/e/c/r/RoutingNodes.java. --- .../cluster/routing/RoutingNodes.java | 34 ++++++++----------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 5d17a59339a..8dd980c8bb3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -31,7 +30,14 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.shard.ShardId; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Predicate; /** @@ -78,7 +84,7 @@ public class RoutingNodes implements Iterable { Map> nodesToShards = new HashMap<>(); // fill in the nodeToShards with the "live" nodes for (ObjectCursor cursor : clusterState.nodes().dataNodes().values()) { - nodesToShards.put(cursor.value.id(), new ArrayList()); + nodesToShards.put(cursor.value.id(), new ArrayList<>()); } // fill in the inverse of node -> shards allocated @@ -91,21 +97,13 @@ public class RoutingNodes implements Iterable { // by the ShardId, as this is common for primary and replicas. // A replica Set might have one (and not more) replicas with the state of RELOCATING. if (shard.assignedToNode()) { - List entries = nodesToShards.get(shard.currentNodeId()); - if (entries == null) { - entries = new ArrayList<>(); - nodesToShards.put(shard.currentNodeId(), entries); - } + List entries = nodesToShards.computeIfAbsent(shard.currentNodeId(), k -> new ArrayList<>()); final ShardRouting sr = getRouting(shard, readOnly); entries.add(sr); assignedShardsAdd(sr); if (shard.relocating()) { - entries = nodesToShards.get(shard.relocatingNodeId()); relocatingShards++; - if (entries == null) { - entries = new ArrayList<>(); - nodesToShards.put(shard.relocatingNodeId(), entries); - } + entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(), k -> new ArrayList<>()); // add the counterpart shard with relocatingNodeId reflecting the source from which // it's relocating from. 
ShardRouting targetShardRouting = shard.buildTargetRelocatingShard(); @@ -121,7 +119,7 @@ public class RoutingNodes implements Iterable { inactiveShardCount++; } } else { - final ShardRouting sr = getRouting(shard, readOnly); + final ShardRouting sr = getRouting(shard, readOnly); assignedShardsAdd(sr); unassignedShards.add(sr); } @@ -449,12 +447,8 @@ public class RoutingNodes implements Iterable { // no unassigned return; } - List shards = assignedShards.get(shard.shardId()); - if (shards == null) { - shards = new ArrayList<>(); - assignedShards.put(shard.shardId(), shards); - } - assert assertInstanceNotInList(shard, shards); + List shards = assignedShards.computeIfAbsent(shard.shardId(), k -> new ArrayList<>()); + assert assertInstanceNotInList(shard, shards); shards.add(shard); } From c8d199bc83623a1fc232799b0faa8630df4769f4 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Dec 2015 18:51:38 -0800 Subject: [PATCH 051/322] Organize and annotate gitignores --- .gitignore | 47 ++++++++++++++++++++++++++--------------------- 1 file changed, 26 insertions(+), 21 deletions(-) diff --git a/.gitignore b/.gitignore index 31f2aa5fc66..b7d5d95a256 100644 --- a/.gitignore +++ b/.gitignore @@ -1,37 +1,42 @@ + +# intellij files .idea/ -.gradle/ *.iml *.ipr *.iws -work/ -/data/ -logs/ -.DS_Store -build/ -generated-resources/ -**/.local* -docs/html/ -docs/build.log -/tmp/ -backwards/ -html_docs -.vagrant/ -## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects) -## All files (.project, .classpath, .settings/*) should be generated through Maven which -## will correctly set the classpath based on the declared dependencies and write settings -## files to ensure common coding style across Eclipse and IDEA. +# eclipse files .project .classpath eclipse-build .settings -## netbeans ignores +# netbeans files nb-configuration.xml nbactions.xml -dependency-reduced-pom.xml +# gradle stuff +.gradle/ +build/ +generated-resources/ -# old patterns specific to maven +# maven stuff (to be removed when trunk becomes 4.x) *-execution-hints.log target/ +dependency-reduced-pom.xml + +# testing stuff +**/.local* +.vagrant/ + +# osx stuff +.DS_Store + +# random old stuff that we should look at the necessity of... +docs/html/ +docs/build.log +html_docs +/tmp/ +backwards/ + + From 36bd8450900072ef260c19a8293406cfe0f7ccab Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Dec 2015 15:10:22 -0500 Subject: [PATCH 052/322] Reorganize o/e/c/a/s/ShardStateAction.java This commit is a trivial reorganization of o/e/c/a/s/ShardStateAction.java. The primary motive is have all of the shard failure handling grouped together, and all of the shard started handling grouped together. 
--- .../action/shard/ShardStateAction.java | 168 +++++++++--------- 1 file changed, 88 insertions(+), 80 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index d09df094a68..c2ac791aa16 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -20,7 +20,11 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; @@ -37,7 +41,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; @@ -45,11 +56,8 @@ import java.util.List; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; -/** - * - */ -public class ShardStateAction extends AbstractComponent { +public class ShardStateAction extends AbstractComponent { public static final String SHARD_STARTED_ACTION_NAME = "internal:cluster/shard/started"; public static final String SHARD_FAILED_ACTION_NAME = "internal:cluster/shard/failure"; @@ -97,52 +105,26 @@ public class ShardStateAction extends AbstractComponent { options = TransportRequestOptions.builder().withTimeout(timeout).build(); } transportService.sendRequest(masterNode, - SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleResponse(TransportResponse.Empty response) { - listener.onSuccess(); - } + SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleResponse(TransportResponse.Empty response) { + listener.onSuccess(); + } - @Override - public void handleException(TransportException exp) { - logger.warn("failed to send failed shard to {}", exp, masterNode); - listener.onShardFailedFailure(masterNode, exp); - } - }); + @Override + public void handleException(TransportException exp) { + logger.warn("failed to send failed shard to {}", exp, masterNode); + listener.onShardFailedFailure(masterNode, exp); + } + }); } - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason) { - DiscoveryNode masterNode = clusterService.state().nodes().masterNode(); - if (masterNode == null) { - 
logger.warn("{} can't send shard started for {}, no master known.", shardRouting.shardId(), shardRouting); - return; + private class ShardFailedTransportHandler implements TransportRequestHandler { + @Override + public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { + handleShardFailureOnMaster(request); + channel.sendResponse(TransportResponse.Empty.INSTANCE); } - shardStarted(shardRouting, indexUUID, reason, masterNode); - } - - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason, final DiscoveryNode masterNode) { - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); - logger.debug("{} sending shard started for {}", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - transportService.sendRequest(masterNode, - SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleException(TransportException exp) { - logger.warn("failed to send shard started to [{}]", exp, masterNode); - } - - }); - } - - private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); - - private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - clusterService.submitStateUpdateTask( - "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.HIGH), - shardFailedClusterStateHandler, - shardFailedClusterStateHandler); } class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { @@ -168,10 +150,10 @@ public class ShardStateAction extends AbstractComponent { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - if (oldState != newState && newState.getRoutingNodes().unassigned().size() > 0) { - logger.trace("unassigned shards after shard failures. scheduling a reroute."); - routingService.reroute("unassigned shards after shard failures, scheduling a reroute"); - } + if (oldState != newState && newState.getRoutingNodes().unassigned().size() > 0) { + logger.trace("unassigned shards after shard failures. 
scheduling a reroute."); + routingService.reroute("unassigned shards after shard failures, scheduling a reroute"); + } } @Override @@ -180,18 +162,45 @@ public class ShardStateAction extends AbstractComponent { } } - private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = - new ShardStartedClusterStateHandler(); - - private void shardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.debug("received shard started for {}", shardRoutingEntry); + private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); + private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry) { + logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); clusterService.submitStateUpdateTask( - "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.URGENT), - shardStartedClusterStateHandler, - shardStartedClusterStateHandler); + "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.HIGH), + shardFailedClusterStateHandler, + shardFailedClusterStateHandler); + } + + public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason) { + DiscoveryNode masterNode = clusterService.state().nodes().masterNode(); + if (masterNode == null) { + logger.warn("{} can't send shard started for {}, no master known.", shardRouting.shardId(), shardRouting); + return; + } + shardStarted(shardRouting, indexUUID, reason, masterNode); + } + + public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason, final DiscoveryNode masterNode) { + ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); + logger.debug("{} sending shard started for {}", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); + transportService.sendRequest(masterNode, + SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleException(TransportException exp) { + logger.warn("failed to send shard started to [{}]", exp, masterNode); + } + }); + } + + class ShardStartedTransportHandler implements TransportRequestHandler { + @Override + public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { + handleShardStartedOnMaster(request); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } } class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { @@ -223,26 +232,20 @@ public class ShardStateAction extends AbstractComponent { } } - private class ShardFailedTransportHandler implements TransportRequestHandler { + private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = new ShardStartedClusterStateHandler(); - @Override - public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - handleShardFailureOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - } + private void handleShardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { + logger.debug("received shard started for {}", shardRoutingEntry); - class ShardStartedTransportHandler implements 
TransportRequestHandler { - - @Override - public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - shardStartedOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } + clusterService.submitStateUpdateTask( + "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.URGENT), + shardStartedClusterStateHandler, + shardStartedClusterStateHandler); } public static class ShardRoutingEntry extends TransportRequest { - ShardRouting shardRouting; String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; String message; @@ -283,8 +286,13 @@ public class ShardStateAction extends AbstractComponent { } public interface Listener { - default void onSuccess() {} - default void onShardFailedNoMaster() {} - default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) {} + default void onSuccess() { + } + + default void onShardFailedNoMaster() { + } + + default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) { + } } } From 082632dcacc3baa18971af23dcb91d310eba92c7 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 15 Dec 2015 21:20:56 +0100 Subject: [PATCH 053/322] aggs: fixed bug in children agg that prevented all child docs from being evaluated Before we only evaluated segments that yielded matches in parent aggs, which caused us to miss to evaluate child docs in segments we didn't have parent matches for. The fix for this is stop remember in what segments we have matches for and simply evaluate all segments. This makes the code simpler and we can still quickly see if a segment doesn't hold child docs like we did before. --- .../children/ParentToChildrenAggregator.java | 16 +---- .../aggregations/bucket/ChildrenIT.java | 62 +++++++++++++++++++ 2 files changed, 65 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java index 6d9a1edc712..0678338fcf7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.aggregations.bucket.children; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.search.*; @@ -64,9 +65,6 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { private final LongObjectPagedHashMap parentOrdToOtherBuckets; private boolean multipleBucketsPerParentOrd = false; - // This needs to be a Set to avoid duplicate reader context entries via (#setNextReader(...), it can get invoked multiple times with the same reader context) - private Set replay = new LinkedHashSet<>(); - public ParentToChildrenAggregator(String name, AggregatorFactories factories, AggregationContext aggregationContext, Aggregator parent, String parentType, Query childFilter, Query parentFilter, ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, @@ -99,17 +97,11 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - if (replay == null) 
{ - throw new IllegalStateException(); - } final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx); assert globalOrdinals != null; Scorer parentScorer = parentFilter.scorer(ctx); final Bits parentDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer); - if (childFilter.scorer(ctx) != null) { - replay.add(ctx); - } return new LeafBucketCollector() { @Override @@ -138,10 +130,8 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { @Override protected void doPostCollection() throws IOException { - final Set replay = this.replay; - this.replay = null; - - for (LeafReaderContext ctx : replay) { + IndexReader indexReader = context().searchContext().searcher().getIndexReader(); + for (LeafReaderContext ctx : indexReader.leaves()) { DocIdSetIterator childDocsIter = childFilter.scorer(ctx); if (childDocsIter == null) { continue; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index b6611a956af..540420c21bc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -18,12 +18,15 @@ */ package org.elasticsearch.search.aggregations.bucket; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.children.Children; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -392,6 +395,65 @@ public class ChildrenIT extends ESIntegTestCase { assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1l)); } + public void testPostCollectAllLeafReaders() throws Exception { + // The 'towns' and 'parent_names' aggs operate on parent docs and if child docs are in different segments we need + // to ensure those segments which child docs are also evaluated to in the post collect phase. + + // Before we only evaluated segments that yielded matches in 'towns' and 'parent_names' aggs, which caused + // us to miss to evaluate child docs in segments we didn't have parent matches for. 
+ + assertAcked( + prepareCreate("index") + .addMapping("parentType", "name", "type=string,index=not_analyzed", "town", "type=string,index=not_analyzed") + .addMapping("childType", "_parent", "type=parentType", "name", "type=string,index=not_analyzed", "age", "type=integer") + ); + List requests = new ArrayList<>(); + requests.add(client().prepareIndex("index", "parentType", "1").setSource("name", "Bob", "town", "Memphis")); + requests.add(client().prepareIndex("index", "parentType", "2").setSource("name", "Alice", "town", "Chicago")); + requests.add(client().prepareIndex("index", "parentType", "3").setSource("name", "Bill", "town", "Chicago")); + requests.add(client().prepareIndex("index", "childType", "1").setSource("name", "Jill", "age", 5).setParent("1")); + requests.add(client().prepareIndex("index", "childType", "2").setSource("name", "Joey", "age", 3).setParent("1")); + requests.add(client().prepareIndex("index", "childType", "3").setSource("name", "John", "age", 2).setParent("2")); + requests.add(client().prepareIndex("index", "childType", "4").setSource("name", "Betty", "age", 6).setParent("3")); + requests.add(client().prepareIndex("index", "childType", "5").setSource("name", "Dan", "age", 1).setParent("3")); + indexRandom(true, requests); + + SearchResponse response = client().prepareSearch("index") + .setSize(0) + .addAggregation(AggregationBuilders.terms("towns").field("town") + .subAggregation(AggregationBuilders.terms("parent_names").field("name") + .subAggregation(AggregationBuilders.children("child_docs").childType("childType")) + ) + ) + .get(); + + Terms towns = response.getAggregations().get("towns"); + assertThat(towns.getBuckets().size(), equalTo(2)); + assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago")); + assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L)); + + Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(2)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + Children children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(1L)); + + assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill")); + assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(1).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + + assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis")); + assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L)); + parents = towns.getBuckets().get(1).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(1)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + } + private static final class Control { final String category; From 34703a838d71c32e3bf944e95c09120dd443b075 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Dec 2015 15:45:36 +0100 Subject: [PATCH 054/322] Beef up TranslogTests with concurrent fatal exceptions test Today we only test this when writing sequentially. 
Yet, in practice we mainly write concurrently, this commit adds a test that tests that concurrent writes with sudden fatal failure will not corrupt our translog. Relates to #15420 --- .../index/translog/Translog.java | 16 +- .../index/translog/TranslogTests.java | 187 ++++++++++++++---- 2 files changed, 163 insertions(+), 40 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 4016695dd89..6329e5cc998 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -158,7 +158,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { if (translogGeneration != null) { - final Checkpoint checkpoint = Checkpoint.read(location.resolve(CHECKPOINT_FILE_NAME)); + final Checkpoint checkpoint = readCheckpoint(); this.recoveredTranslogs = recoverFromFiles(translogGeneration, checkpoint); if (recoveredTranslogs.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); @@ -545,6 +545,15 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } + } catch (AlreadyClosedException | IOException ex) { + if (current.getTragicException() != null) { + try { + close(); + } catch (Exception inner) { + ex.addSuppressed(inner); + } + } + throw ex; } return false; } @@ -1433,4 +1442,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return current.getTragicException(); } + /** Reads and returns the current checkpoint */ + final Checkpoint readCheckpoint() throws IOException { + return Checkpoint.read(location.resolve(CHECKPOINT_FILE_NAME)); + } + } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index e35c04dcd6b..3ede7355524 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.mockfile.FilterFileChannel; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase; @@ -62,6 +63,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import static org.hamcrest.Matchers.*; @@ -1242,11 +1244,11 @@ public class TranslogTests extends ESTestCase { private final CountDownLatch downLatch; private final int opsPerThread; private final int threadId; - private final BlockingQueue writtenOperations; + private final Collection writtenOperations; private final Throwable[] threadExceptions; private final Translog translog; - public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, BlockingQueue writtenOperations, Throwable[] threadExceptions) { + public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection writtenOperations, Throwable[] 
threadExceptions) { this.translog = translog; this.downLatch = downLatch; this.opsPerThread = opsPerThread; @@ -1276,59 +1278,34 @@ public class TranslogTests extends ESTestCase { throw new ElasticsearchException("not supported op type"); } - Translog.Location loc = translog.add(op); + Translog.Location loc = add(op); writtenOperations.add(new LocationOperation(op, loc)); + afterAdd(); } } catch (Throwable t) { threadExceptions[threadId] = t; } } + + protected Translog.Location add(Translog.Operation op) throws IOException { + return translog.add(op); + } + + protected void afterAdd() throws IOException {} } public void testFailFlush() throws IOException { Path tempDir = createTempDir(); - final AtomicBoolean simulateDiskFull = new AtomicBoolean(); + final AtomicBoolean fail = new AtomicBoolean(); TranslogConfig config = getTranslogConfig(tempDir); - Translog translog = new Translog(config) { - @Override - TranslogWriter.ChannelFactory getChannelFactory() { - final TranslogWriter.ChannelFactory factory = super.getChannelFactory(); - - return new TranslogWriter.ChannelFactory() { - @Override - public FileChannel open(Path file) throws IOException { - FileChannel channel = factory.open(file); - return new FilterFileChannel(channel) { - - @Override - public int write(ByteBuffer src) throws IOException { - if (simulateDiskFull.get()) { - if (src.limit() > 1) { - final int pos = src.position(); - final int limit = src.limit(); - src.limit(limit / 2); - super.write(src); - src.position(pos); - src.limit(limit); - throw new IOException("__FAKE__ no space left on device"); - } - } - return super.write(src); - } - }; - } - }; - } - }; + Translog translog = getFailableTranslog(fail, config); List locations = new ArrayList<>(); int opsSynced = 0; - int opsAdded = 0; boolean failed = false; while(failed == false) { try { locations.add(translog.add(new Translog.Index("test", "" + opsSynced, Integer.toString(opsSynced).getBytes(Charset.forName("UTF-8"))))); - opsAdded++; translog.sync(); opsSynced++; } catch (IOException ex) { @@ -1336,9 +1313,9 @@ public class TranslogTests extends ESTestCase { assertFalse(translog.isOpen()); assertEquals("__FAKE__ no space left on device", ex.getMessage()); } - simulateDiskFull.set(randomBoolean()); + fail.set(randomBoolean()); } - simulateDiskFull.set(false); + fail.set(false); if (randomBoolean()) { try { locations.add(translog.add(new Translog.Index("test", "" + opsSynced, Integer.toString(opsSynced).getBytes(Charset.forName("UTF-8"))))); @@ -1402,4 +1379,136 @@ public class TranslogTests extends ESTestCase { } } } + + public void testFatalIOExceptionsWhileWritingConcurrently() throws IOException, InterruptedException { + Path tempDir = createTempDir(); + final AtomicBoolean fail = new AtomicBoolean(false); + + TranslogConfig config = getTranslogConfig(tempDir); + Translog translog = getFailableTranslog(fail, config); + + final int threadCount = randomIntBetween(1, 5); + Thread[] threads = new Thread[threadCount]; + final Throwable[] threadExceptions = new Throwable[threadCount]; + final CountDownLatch downLatch = new CountDownLatch(1); + final CountDownLatch added = new CountDownLatch(randomIntBetween(10, 100)); + List writtenOperations = Collections.synchronizedList(new ArrayList<>()); + for (int i = 0; i < threadCount; i++) { + final int threadId = i; + threads[i] = new TranslogThread(translog, downLatch, 200, threadId, writtenOperations, threadExceptions) { + @Override + protected Translog.Location add(Translog.Operation op) throws IOException { + 
Translog.Location add = super.add(op); + added.countDown(); + return add; + } + + @Override + protected void afterAdd() throws IOException { + if (randomBoolean()) { + translog.sync(); + } + } + }; + threads[i].setDaemon(true); + threads[i].start(); + } + downLatch.countDown(); + added.await(); + try (Translog.View view = translog.newView()) { + // this holds a reference to the current tlog channel such that it's not closed + // if we hit a tragic event. this is important to ensure that asserts inside the Translog#add doesn't trip + // otherwise our assertions here are off by one sometimes. + fail.set(true); + for (int i = 0; i < threadCount; i++) { + threads[i].join(); + } + Collections.sort(writtenOperations, (a, b) -> a.location.compareTo(b.location)); + assertFalse(translog.isOpen()); + final Checkpoint checkpoint = Checkpoint.read(config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME)); + Iterator iterator = writtenOperations.iterator(); + while (iterator.hasNext()) { + LocationOperation next = iterator.next(); + if (checkpoint.offset < (next.location.translogLocation + next.location.size)) { + // drop all that haven't been synced + iterator.remove(); + } + } + config.setTranslogGeneration(translog.getGeneration()); + try (Translog tlog = new Translog(config)) { + try (Translog.Snapshot snapshot = tlog.newSnapshot()) { + if (writtenOperations.size() != snapshot.estimatedTotalOperations()) { + for (int i = 0; i < threadCount; i++) { + if (threadExceptions[i] != null) + threadExceptions[i].printStackTrace(); + } + } + assertEquals(writtenOperations.size(), snapshot.estimatedTotalOperations()); + for (int i = 0; i < writtenOperations.size(); i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(next, writtenOperations.get(i).operation); + } + } + } + } + } + + private Translog getFailableTranslog(final AtomicBoolean fail, final TranslogConfig config) throws IOException { + return new Translog(config) { + @Override + TranslogWriter.ChannelFactory getChannelFactory() { + final TranslogWriter.ChannelFactory factory = super.getChannelFactory(); + + return new TranslogWriter.ChannelFactory() { + @Override + public FileChannel open(Path file) throws IOException { + FileChannel channel = factory.open(file); + return new ThrowingFileChannel(fail, randomBoolean(), channel); + } + }; + } + }; + } + + public static class ThrowingFileChannel extends FilterFileChannel { + private final AtomicBoolean fail; + private final boolean partialWrite; + + public ThrowingFileChannel(AtomicBoolean fail, boolean partialWrite, FileChannel delegate) { + super(delegate); + this.fail = fail; + this.partialWrite = partialWrite; + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int write(ByteBuffer src, long position) throws IOException { + throw new UnsupportedOperationException(); + } + + + public int write(ByteBuffer src) throws IOException { + if (fail.get()) { + if (partialWrite) { + if (src.limit() > 1) { + final int pos = src.position(); + final int limit = src.limit(); + src.limit(limit / 2); + super.write(src); + src.position(pos); + src.limit(limit); + throw new IOException("__FAKE__ no space left on device"); + } + } + throw new 
MockDirectoryWrapper.FakeIOException(); + } + return super.write(src); + } + } } From d0dbfce49a6988aa91b607b5f4d529d844fe4710 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 10:38:19 +0100 Subject: [PATCH 055/322] allow -1 for concurrent rebalance, it's a special value :( --- .../decider/ConcurrentRebalanceAllocationDecider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 504ea5da215..a9ad35fd526 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -42,7 +42,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, 0, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, true, Setting.Scope.CLUSTER); private volatile int clusterConcurrentRebalance; @Inject From 7a469538bca9c7db04437ed6ad002ab02273458c Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Tue, 15 Dec 2015 18:50:04 +0100 Subject: [PATCH 056/322] serialize os name, arch and version too These three properties are build in the jason response but were not transported when a node sends the response. closes #15422 --- .../elasticsearch/monitor/os/DummyOsInfo.java | 34 +++++ .../org/elasticsearch/monitor/os/OsInfo.java | 6 + .../plugins/DummyPluginInfo.java | 28 ++++ .../nodesinfo/NodeInfoStreamingTests.java | 140 ++++++++++++++++++ 4 files changed, 208 insertions(+) create mode 100644 core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java create mode 100644 core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java create mode 100644 core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java diff --git a/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java new file mode 100644 index 00000000000..599755e78a4 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.monitor.os; + +public class DummyOsInfo extends OsInfo { + + DummyOsInfo() { + refreshInterval = 0; + availableProcessors = 0; + allocatedProcessors = 0; + name = "dummy_name"; + arch = "dummy_arch"; + version = "dummy_version"; + } + + public static final DummyOsInfo INSTANCE = new DummyOsInfo(); +} diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java index f34cd51a143..d94447221c3 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java @@ -108,6 +108,9 @@ public class OsInfo implements Streamable, ToXContent { refreshInterval = in.readLong(); availableProcessors = in.readInt(); allocatedProcessors = in.readInt(); + name = in.readOptionalString(); + arch = in.readOptionalString(); + version = in.readOptionalString(); } @Override @@ -115,5 +118,8 @@ public class OsInfo implements Streamable, ToXContent { out.writeLong(refreshInterval); out.writeInt(availableProcessors); out.writeInt(allocatedProcessors); + out.writeOptionalString(name); + out.writeOptionalString(arch); + out.writeOptionalString(version); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java new file mode 100644 index 00000000000..a57a96c631d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -0,0 +1,28 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugins; + +public class DummyPluginInfo extends PluginInfo { + + private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { + super(name, description, site, version, jvm, classname, isolated); + } + + public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", true, "dummy_plugin_version", true, "DummyPluginName", true); +} diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java new file mode 100644 index 00000000000..693ba4a2eba --- /dev/null +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.nodesinfo; + +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.os.DummyOsInfo; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.monitor.process.ProcessInfo; +import org.elasticsearch.plugins.DummyPluginInfo; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPoolInfo; +import org.elasticsearch.transport.TransportInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.core.IsEqual.equalTo; + +/** + * + */ +public class NodeInfoStreamingTests extends ESTestCase { + + public void testNodeInfoStreaming() throws IOException { + NodeInfo nodeInfo = createNodeInfo(); + Version version = Version.CURRENT; + BytesStreamOutput out = new BytesStreamOutput(); + out.setVersion(version); + nodeInfo.writeTo(out); + out.close(); + StreamInput in = StreamInput.wrap(out.bytes()); + in.setVersion(version); + NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in); + assertExpectedUnchanged(nodeInfo, readNodeInfo); + + } + // checks all properties that are expected to be unchanged. 
Once we start changing them between versions this method has to be changed as well + private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { + assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString())); + assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname())); + assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion())); + assertThat(nodeInfo.getServiceAttributes().size(), equalTo(readNodeInfo.getServiceAttributes().size())); + for (Map.Entry<String, String> entry : nodeInfo.getServiceAttributes().entrySet()) { + assertNotNull(readNodeInfo.getServiceAttributes().get(entry.getKey())); + assertThat(readNodeInfo.getServiceAttributes().get(entry.getKey()), equalTo(entry.getValue())); + } + compareJsonOutput(nodeInfo.getHttp(), readNodeInfo.getHttp()); + compareJsonOutput(nodeInfo.getJvm(), readNodeInfo.getJvm()); + compareJsonOutput(nodeInfo.getProcess(), readNodeInfo.getProcess()); + compareJsonOutput(nodeInfo.getSettings(), readNodeInfo.getSettings()); + compareJsonOutput(nodeInfo.getThreadPool(), readNodeInfo.getThreadPool()); + compareJsonOutput(nodeInfo.getTransport(), readNodeInfo.getTransport()); + compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode()); + compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs()); + comparePluginsAndModules(nodeInfo, readNodeInfo); + } + + private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder pluginsAndModules = jsonBuilder(); + pluginsAndModules.startObject(); + nodeInfo.getPlugins().toXContent(pluginsAndModules, params); + pluginsAndModules.endObject(); + XContentBuilder readPluginsAndModules = jsonBuilder(); + readPluginsAndModules.startObject(); + readNodeInfo.getPlugins().toXContent(readPluginsAndModules, params); + readPluginsAndModules.endObject(); + assertThat(pluginsAndModules.string(), equalTo(readPluginsAndModules.string())); + } + + private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException { + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder param1Builder = jsonBuilder(); + XContentBuilder param2Builder = jsonBuilder(); + param1.toXContent(param1Builder, params); + param2.toXContent(param2Builder, params); + assertThat(param1Builder.string(), equalTo(param2Builder.string())); + } + + + private NodeInfo createNodeInfo() { + Build build = Build.CURRENT; + DiscoveryNode node = new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, VersionUtils.randomVersion(random())); + Map<String, String> serviceAttributes = new HashMap<>(); + serviceAttributes.put("test", "attribute"); + Settings settings = Settings.builder().put("test", "setting").build(); + OsInfo osInfo = DummyOsInfo.INSTANCE; + ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean()); + JvmInfo jvm = JvmInfo.jvmInfo(); + List<ThreadPool.Info> threadPoolInfos = new ArrayList<>(); + threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); + ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); + Map<String, BoundTransportAddress> profileAddresses = new HashMap<>(); + BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE}, DummyTransportAddress.INSTANCE); + profileAddresses.put("test_address", dummyBoundTransportAddress); + TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); + HttpInfo httpInfo = new
HttpInfo(dummyBoundTransportAddress, randomLong()); + PluginsAndModules plugins = new PluginsAndModules(); + plugins.addModule(DummyPluginInfo.INSTANCE); + plugins.addPlugin(DummyPluginInfo.INSTANCE); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, httpInfo, plugins); + } +} From 20dc8556440eed7ee95f34fadf3a2f222a8b108b Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 16 Dec 2015 11:27:45 +0100 Subject: [PATCH 057/322] [TEST] Fix ScriptServiceTests.testFineGrainedSettings that can loop indefinitely --- .../test/java/org/elasticsearch/script/ScriptServiceTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 23cada02c6c..5abdc4f79bb 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -225,7 +225,7 @@ public class ScriptServiceTests extends ESTestCase { } while (scriptContextSettings.containsKey(scriptContext)); scriptContextSettings.put(scriptContext, randomFrom(ScriptMode.values())); } - int numEngineSettings = randomIntBetween(0, 10); + int numEngineSettings = randomIntBetween(0, ScriptType.values().length * scriptContexts.length); Map engineSettings = new HashMap<>(); for (int i = 0; i < numEngineSettings; i++) { String settingKey; From 9cf2f42d15675e09ed03613c17356d89502b5623 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 11:52:30 +0100 Subject: [PATCH 058/322] fix list/array settings --- .../org/elasticsearch/common/Booleans.java | 3 +- .../common/settings/Setting.java | 56 ++++++++++++++++++- .../common/settings/Settings.java | 2 + .../transport/TransportService.java | 23 ++++---- .../common/settings/SettingTests.java | 56 ++++++++++++++++++- 5 files changed, 122 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/Booleans.java b/core/src/main/java/org/elasticsearch/common/Booleans.java index 6b1b9b016a7..9c5f5746633 100644 --- a/core/src/main/java/org/elasticsearch/common/Booleans.java +++ b/core/src/main/java/org/elasticsearch/common/Booleans.java @@ -84,7 +84,6 @@ public class Booleans { * throws exception if string cannot be parsed to boolean */ public static Boolean parseBooleanExact(String value) { - boolean isFalse = isExplicitFalse(value); if (isFalse) { return false; @@ -94,7 +93,7 @@ return true; } - throw new IllegalArgumentException("value cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ] "); + throw new IllegalArgumentException("Failed to parse value [" + value + "] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]"); } public static Boolean parseBoolean(String value, Boolean defaultValue) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index ba9573e0bbf..758bff5c6cc 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -18,9 +18,11 @@ */ package org.elasticsearch.common.settings; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Booleans; +import
org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.regex.Regex; @@ -30,6 +32,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.*; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; @@ -38,7 +42,7 @@ import java.util.function.Function; */ public class Setting extends ToXContentToBytes { private final String key; - private final Function defaultValue; + protected final Function defaultValue; private final Function parser; private final boolean dynamic; private final Scope scope; @@ -127,7 +131,7 @@ public class Setting extends ToXContentToBytes { * Returns the raw (string) settings value. If the setting is not present in the given settings object the default value is returned * instead. This is useful if the value can't be parsed due to an invalid value to access the actual value. */ - public final String getRaw(Settings settings) { + public String getRaw(Settings settings) { return settings.get(key, defaultValue.apply(settings)); } @@ -300,6 +304,54 @@ public class Setting extends ToXContentToBytes { return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, scope); } + public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { + Function> parser = (s) -> { + try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){ + XContentParser.Token token = xContentParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("expected START_ARRAY but got " + token); + } + ArrayList list = new ArrayList<>(); + while ((token = xContentParser.nextToken()) !=XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new IllegalArgumentException("expected VALUE_STRING but got " + token); + } + list.add(singleValueParser.apply(xContentParser.text())); + } + return list; + } catch (IOException e) { + throw new IllegalArgumentException("failed to parse array", e); + } + }; + return new Setting>(key, arrayToParsableString(defaultStringValue.toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { + + @Override + public String getRaw(Settings settings) { + String[] array = settings.getAsArray(key, null); + + return array == null ? 
defaultValue.apply(settings) : arrayToParsableString(array); + } + + + }; + } + + private static String arrayToParsableString(String[] array) { + try { + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); + builder.startArray(); + for (String element : array) { + builder.value(element); + } + builder.endArray(); + return builder.string(); + } catch (IOException ex) { + throw new ElasticsearchException(ex); + } + } + + + public static Setting groupSetting(String key, boolean dynamic, Scope scope) { if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 05f3cb1ff0b..f8dd5d4f1f6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -597,6 +597,8 @@ public final class Settings implements ToXContent { return result.toArray(new String[result.size()]); } + + /** * Returns group settings for the given setting prefix. */ diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 05d5242ac82..916c4863e07 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -42,10 +42,7 @@ import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; @@ -87,8 +84,8 @@ public class TransportService extends AbstractLifecycleComponent TRACE_LOG_INCLUDE_SETTING = new Setting<>("transport.tracer.include", "", Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER); - public static final Setting TRACE_LOG_EXCLUDE_SETTING = new Setting<>("transport.tracer.exclude", "internal:discovery/zen/fd*," + TransportLivenessAction.NAME, Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER);; + public static final Setting> TRACE_LOG_INCLUDE_SETTING = Setting.listSetting("transport.tracer.include", Collections.emptyList(), (s) -> s, true, Setting.Scope.CLUSTER); + public static final Setting> TRACE_LOG_EXCLUDE_SETTING = Setting.listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), (s) -> s, true, Setting.Scope.CLUSTER); private final ESLogger tracerLog; @@ -107,8 +104,8 @@ public class TransportService extends AbstractLifecycleComponent tracerLogInclude) { + this.tracerLogInclude = tracerLogInclude.toArray(Strings.EMPTY_ARRAY); } - void setTracelLogExclude(String[] tracelLogExclude) { - this.tracelLogExclude = tracelLogExclude; + void setTracerLogExclude(List tracelLogExclude) { + this.tracelLogExclude = tracelLogExclude.toArray(Strings.EMPTY_ARRAY); } @Override protected void doStart() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 1640cfdd3b5..1895d3ee326 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ 
b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -24,6 +24,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -72,7 +75,7 @@ public class SettingTests extends ESTestCase { settingUpdater.apply(build, Settings.EMPTY); fail("not a boolean"); } catch (IllegalArgumentException ex) { - assertEquals("Failed to parse value [I am not a boolean] for setting [foo.bar]", ex.getMessage()); + assertEquals("Failed to parse value [I am not a boolean] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]", ex.getMessage()); } } @@ -248,4 +251,55 @@ public class SettingTests extends ESTestCase { assertEquals(1, c.b.intValue()); } + + public void testListSettings() { + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + List value = listSetting.get(Settings.EMPTY); + assertEquals(1, value.size()); + assertEquals("foo,bar", value.get(0)); + + List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); + Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + value = listSetting.get(builder.build()); + assertEquals(input.size(), value.size()); + assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); + + // try to parse this really annoying format + builder = Settings.builder(); + for (int i = 0; i < input.size(); i++) { + builder.put("foo.bar." + i, input.get(i)); + } + value = listSetting.get(builder.build()); + assertEquals(input.size(), value.size()); + assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); + + AtomicReference> ref = new AtomicReference<>(); + AbstractScopedSettings.SettingUpdater settingUpdater = listSetting.newUpdater(ref::set, logger); + assertTrue(settingUpdater.hasChanged(builder.build(), Settings.EMPTY)); + settingUpdater.apply(builder.build(), Settings.EMPTY); + assertEquals(input.size(), ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0])); + + settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build()); + assertEquals(1, ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"}); + + settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build()); + assertEquals(3, ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"}); + + settingUpdater.apply(Settings.EMPTY, Settings.builder().put("foo.bar", "1,2,3").build()); + assertEquals(1, ref.get().size()); + assertEquals("foo,bar", ref.get().get(0)); + + Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, true, Setting.Scope.CLUSTER); + List defaultValue = otherSettings.get(Settings.EMPTY); + assertEquals(0, defaultValue.size()); + List intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build()); + assertEquals(4, intValues.size()); + for (int i = 0; i < intValues.size(); i++) { + assertEquals(i, intValues.get(i).intValue()); + } + + } } From 32881c08b3f1e05c213d0bf0f53a51cdb30c59c9 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 12:00:34 +0100 Subject: 
[PATCH 059/322] fix more exception usage --- .../main/java/org/elasticsearch/common/settings/Setting.java | 2 ++ .../action/admin/cluster/settings/SettingsUpdaterTests.java | 1 + 2 files changed, 3 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 758bff5c6cc..4e3d708ce21 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -120,6 +120,8 @@ public class Setting extends ToXContentToBytes { return parser.apply(value); } catch (ElasticsearchParseException ex) { throw new IllegalArgumentException(ex.getMessage(), ex); + } catch (NumberFormatException ex) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", ex); + } catch (IllegalArgumentException ex) { throw ex; } catch (Exception t) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java index fe4f8bbdf10..bd1377b89fe 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -91,6 +91,7 @@ public class SettingsUpdaterTests extends ESTestCase { Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); fail("all or nothing"); } catch (IllegalArgumentException ex) { + logger.info("", ex); assertEquals("Failed to parse value [not a float] for setting [cluster.routing.allocation.balance.index]", ex.getMessage()); } assertNull("updater only does a dryRun", index.get()); From e586c966becd3ef7428cd9c56478749db5e2fb5c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 12:08:48 +0100 Subject: [PATCH 060/322] fix several .getKey usages --- .../indices/memory/breaker/CircuitBreakerNoopIT.java | 4 ++-- .../main/java/org/elasticsearch/test/InternalTestCluster.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index 08df60126a5..b1b56acd8cd 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -41,10 +41,10 @@ public class CircuitBreakerNoopIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop") // This is set low, because if the "noop" is not a noop, it will break .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop") // This is set low, because if the "noop" is not a noop, it will break .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
"10b") .build(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 2e89cc48312..06724d2c7d9 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -416,8 +416,8 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextInt(10) == 0) { - builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); - builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); + builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop"); + builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop"); } if (random.nextBoolean()) { From e5dc124811f989e0b3b43b8cb8f1bdf981d15338 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 12:27:07 +0100 Subject: [PATCH 061/322] apply review from @bleskes --- .../index/translog/Translog.java | 34 +++++++------------ .../index/translog/TranslogTests.java | 25 +++++++++++++- 2 files changed, 37 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 6329e5cc998..a105f652728 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -421,13 +421,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return location; } } catch (AlreadyClosedException | IOException ex) { - if (current.getTragicException() != null) { - try { - close(); - } catch (Exception inner) { - ex.addSuppressed(inner); - } - } + closeOnTragicEvent(ex); throw ex; } catch (Throwable e) { throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e); @@ -507,13 +501,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC current.sync(); } } catch (AlreadyClosedException | IOException ex) { - if (current.getTragicException() != null) { - try { - close(); - } catch (Exception inner) { - ex.addSuppressed(inner); - } - } + closeOnTragicEvent(ex); throw ex; } } @@ -546,18 +534,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return current.syncUpTo(location.translogLocation + location.size); } } catch (AlreadyClosedException | IOException ex) { - if (current.getTragicException() != null) { - try { - close(); - } catch (Exception inner) { - ex.addSuppressed(inner); - } - } + closeOnTragicEvent(ex); throw ex; } return false; } + private void closeOnTragicEvent(Throwable ex) { + if (current.getTragicException() != null) { + try { + close(); + } catch (Exception inner) { + ex.addSuppressed(inner); + } + } + } + /** * return stats */ diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 3ede7355524..51de00f74a6 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -1308,6 +1308,9 @@ public class TranslogTests extends ESTestCase { locations.add(translog.add(new Translog.Index("test", "" + opsSynced, 
Integer.toString(opsSynced).getBytes(Charset.forName("UTF-8"))))); translog.sync(); opsSynced++; + } catch (MockDirectoryWrapper.FakeIOException ex) { + failed = true; + assertFalse(translog.isOpen()); } catch (IOException ex) { failed = true; assertFalse(translog.isOpen()); @@ -1322,7 +1325,11 @@ public class TranslogTests extends ESTestCase { fail("we are already closed"); } catch (AlreadyClosedException ex) { assertNotNull(ex.getCause()); - assertEquals(ex.getCause().getMessage(), "__FAKE__ no space left on device"); + if (ex.getCause() instanceof MockDirectoryWrapper.FakeIOException) { + assertNull(ex.getCause().getMessage()); + } else { + assertEquals(ex.getCause().getMessage(), "__FAKE__ no space left on device"); + } } } @@ -1423,6 +1430,22 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < threadCount; i++) { threads[i].join(); } + boolean atLeastOneFailed = false; + for (Throwable ex : threadExceptions) { + if (ex != null) { + atLeastOneFailed = true; + break; + } + } + if (atLeastOneFailed == false) { + try { + boolean syncNeeded = translog.syncNeeded(); + translog.close(); + assertFalse("should have failed if sync was needed", syncNeeded); + } catch (IOException ex) { + // boom now we failed + } + } Collections.sort(writtenOperations, (a, b) -> a.location.compareTo(b.location)); assertFalse(translog.isOpen()); final Checkpoint checkpoint = Checkpoint.read(config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME)); From 4597a22ace8f51569590c18b81a6616f49fa2e70 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 16 Dec 2015 12:52:28 +0100 Subject: [PATCH 062/322] Merge pull request #15473 from jmluy/patch-1 Update sample in sort for consistency --- docs/reference/search/request/sort.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 8d0b6708979..14ab207c301 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -238,7 +238,7 @@ Format in `lat,lon`. "sort" : [ { "_geo_distance" : { - "pin.location" : "-70,40", + "pin.location" : "40,-70", "order" : "asc", "unit" : "km" } @@ -301,7 +301,7 @@ Multiple geo points can be passed as an array containing any `geo_point` format, [source,js] -------------------------------------------------- "pin.location" : [[-70, 40], [-71, 42]] -"pin.location" : [{"lat": -70, "lon": 40}, {"lat": -71, "lon": 42}] +"pin.location" : [{"lat": 40, "lon": -70}, {"lat": 42, "lon": -71}] -------------------------------------------------- and so forth. 
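For context on the sample fix above: the same geo point can be written in several formats with different coordinate orders. The string form is "lat,lon" (hence "40,-70" for latitude 40, longitude -70), the object form names the fields explicitly, and the array form follows GeoJSON and is [lon, lat] (hence [-70, 40] for the same point), which is why the array samples are left untouched while the string and object samples are flipped. A minimal sketch of the two orderings follows; the helper names are hypothetical and not part of the code base touched by this patch series:

    import java.util.Locale;

    // Illustrative only: hypothetical helper mirroring the two coordinate conventions.
    final class LatLon {
        final double lat;
        final double lon;

        LatLon(double lat, double lon) {
            this.lat = lat;
            this.lon = lon;
        }

        // String form is "lat,lon": "40,-70" means lat=40, lon=-70.
        static LatLon fromString(String s) {
            String[] parts = s.split(",");
            return new LatLon(Double.parseDouble(parts[0].trim()), Double.parseDouble(parts[1].trim()));
        }

        // Array form follows GeoJSON and is [lon, lat]: [-70, 40] is the same point.
        static LatLon fromArray(double[] lonLat) {
            return new LatLon(lonLat[1], lonLat[0]);
        }

        @Override
        public String toString() {
            return String.format(Locale.ROOT, "lat=%s, lon=%s", lat, lon);
        }
    }

Under these assumptions, LatLon.fromString("40,-70") and LatLon.fromArray(new double[]{-70, 40}) describe the same pin location.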
From c1ff619781b935ca9c8879ffeaf768bbc179e1e8 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 14:13:22 +0100 Subject: [PATCH 063/322] fix cluster settings to accept complex list settings --- .../settings/AbstractScopedSettings.java | 7 +++---- .../elasticsearch/common/settings/Setting.java | 17 ++++++++++++++--- .../common/settings/ScopedSettingsTests.java | 14 ++++++++++++++ .../common/settings/SettingTests.java | 18 ++++++++++++++++++ 4 files changed, 49 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index c7361e75b1d..1efc9d3fed6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.util.*; import java.util.function.BiConsumer; @@ -34,7 +33,7 @@ import java.util.function.Consumer; public abstract class AbstractScopedSettings extends AbstractComponent { private Settings lastSettingsApplied = Settings.EMPTY; private final List settingUpdaters = new ArrayList<>(); - private final Map<String, Setting<?>> groupSettings = new HashMap<>(); + private final Map<String, Setting<?>> complexMatchers = new HashMap<>(); private final Map<String, Setting<?>> keySettings = new HashMap<>(); private final Setting.Scope scope; @@ -45,7 +44,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); } if (entry.isGroupSetting()) { - groupSettings.put(entry.getKey(), entry); + complexMatchers.put(entry.getKey(), entry); } else { keySettings.put(entry.getKey(), entry); } @@ -216,7 +215,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { public Setting<?> get(String key) { Setting<?> setting = keySettings.get(key); if (setting == null) { - for (Map.Entry<String, Setting<?>> entry : groupSettings.entrySet()) { + for (Map.Entry<String, Setting<?>> entry : complexMatchers.entrySet()) { if (entry.getValue().match(key)) { return entry.getValue(); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 4e3d708ce21..884479d7cb6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; +import java.util.regex.Pattern; /** */ @@ -95,6 +96,10 @@ public class Setting extends ToXContentToBytes { return false; } + boolean hasComplexMatcher() { + return isGroupSetting(); + } + /** * Returns the default values string representation for this setting.
* @param settings a settings object for settings that has a default value depending on another setting if available @@ -143,7 +148,7 @@ public class Setting extends ToXContentToBytes { * @see #isGroupSetting() */ public boolean match(String toTest) { - return Regex.simpleMatch(key, toTest); + return key.equals(toTest); } @Override @@ -326,15 +331,21 @@ public class Setting extends ToXContentToBytes { } }; return new Setting>(key, arrayToParsableString(defaultStringValue.toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { - + private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override public String getRaw(Settings settings) { String[] array = settings.getAsArray(key, null); - return array == null ? defaultValue.apply(settings) : arrayToParsableString(array); } + public boolean match(String toTest) { + return pattern.matcher(toTest).matches(); + } + @Override + boolean hasComplexMatcher() { + return true; + } }; } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 9c5320b1354..b54784dc16d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -21,12 +21,15 @@ package org.elasticsearch.common.settings; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; public class ScopedSettingsTests extends ESTestCase { @@ -138,4 +141,15 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(diff.getAsMap().size(), 1); assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(17)); } + + public void testUpdateTracer() { + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AtomicReference> ref = new AtomicReference<>(); + settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set); + settings.applySettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build()); + assertNotNull(ref.get().size()); + assertEquals(ref.get().size(), 2); + assertTrue(ref.get().contains("internal:index/shard/recovery/*")); + assertTrue(ref.get().contains("internal:gateway/local*")); + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 1895d3ee326..069418a7e1d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -300,6 +300,24 @@ public class SettingTests extends ESTestCase { for (int i = 0; i < intValues.size(); i++) { assertEquals(i, intValues.get(i).intValue()); } + } + + public void testListSettingAcceptsNumberSyntax() { + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + List input = Arrays.asList("test", "test1, 
test2", "test", ",,,,"); + Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + // try to parse this really annoying format + for (String key : builder.internalMap().keySet()) { + assertTrue("key: " + key + " doesn't match", listSetting.match(key)); + } + builder = Settings.builder().put("foo.bar", "1,2,3"); + for (String key : builder.internalMap().keySet()) { + assertTrue("key: " + key + " doesn't match", listSetting.match(key)); + } + assertFalse(listSetting.match("foo_bar")); + assertFalse(listSetting.match("foo_bar.1")); + assertTrue(listSetting.match("foo.bar")); + assertTrue(listSetting.match("foo.bar." + randomIntBetween(0,10000))); } } From 8ac8c1f54770049ce35034ed3bede66fa6add9fd Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 16 Dec 2015 15:54:11 +0100 Subject: [PATCH 064/322] Make mapping serialization more robust. When creating a metadata mapper for a new type, we reuse an existing configuration from an existing type (if any) in order to avoid introducing conflicts. However this field type that is provided is considered as both an initial configuration and the default configuration. So at serialization time, we might only serialize the difference between the current configuration and this default configuration, which might be different to what is actually considered the default configuration. This does not cause bugs today because metadata mappers usually override the toXContent method and compare the current field type with Defaults.FIELD_TYPE instead of defaultFieldType() but I would still like to do this change to avoid future bugs. --- .../main/java/org/elasticsearch/index/mapper/FieldMapper.java | 4 ++-- .../org/elasticsearch/index/mapper/MetadataFieldMapper.java | 4 ++-- .../elasticsearch/index/mapper/core/BinaryFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/BooleanFieldMapper.java | 2 +- .../index/mapper/core/CompletionFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/NumberFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/StringFieldMapper.java | 2 +- .../index/mapper/geo/BaseGeoPointFieldMapper.java | 2 +- .../elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/AllFieldMapper.java | 2 +- .../index/mapper/internal/FieldNamesFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/IdFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/IndexFieldMapper.java | 2 +- .../index/mapper/internal/ParentFieldMapper.java | 2 +- .../index/mapper/internal/RoutingFieldMapper.java | 2 +- .../index/mapper/internal/SourceFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/TTLFieldMapper.java | 2 +- .../index/mapper/internal/TimestampFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/TypeFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/UidFieldMapper.java | 2 +- .../index/mapper/internal/VersionFieldMapper.java | 2 +- .../index/mapper/externalvalues/ExternalMapper.java | 2 +- .../index/mapper/externalvalues/ExternalMetadataMapper.java | 2 +- .../index/mapper/internal/FieldNamesFieldMapperTests.java | 2 +- .../elasticsearch/mapper/attachments/AttachmentMapper.java | 2 +- .../org/elasticsearch/index/mapper/size/SizeFieldMapper.java | 2 +- 26 files changed, 28 insertions(+), 28 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 6f447cdeb86..30df3562aec 100644 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -64,10 +64,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable { protected final MultiFields.Builder multiFieldsBuilder; protected CopyTo copyTo; - protected Builder(String name, MappedFieldType fieldType) { + protected Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) { super(name); this.fieldType = fieldType.clone(); - this.defaultFieldType = fieldType.clone(); + this.defaultFieldType = defaultFieldType.clone(); this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable multiFieldsBuilder = new MultiFields.Builder(); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 2f3b40126ed..622c7729dd4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -51,8 +51,8 @@ public abstract class MetadataFieldMapper extends FieldMapper { } public abstract static class Builder extends FieldMapper.Builder { - public Builder(String name, MappedFieldType fieldType) { - super(name, fieldType); + public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) { + super(name, fieldType, defaultFieldType); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 7468f4fb2f6..0ee311678ef 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -72,7 +72,7 @@ public class BinaryFieldMapper extends FieldMapper { public static class Builder extends FieldMapper.Builder { public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index cd76fdbb047..e381bc9c60b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -72,7 +72,7 @@ public class BooleanFieldMapper extends FieldMapper { public static class Builder extends FieldMapper.Builder { public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); this.builder = this; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 9d465b4cffc..69177401db7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -356,7 +356,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp * @param name of the completion field to build */ public Builder(String name) { - super(name, new CompletionFieldType()); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; } diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 04dd1a21335..ed537aa7e5f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -66,7 +66,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM private Boolean coerce; public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { - super(name, fieldType); + super(name, fieldType, fieldType); this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 061d3a2e343..08582c65997 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -98,7 +98,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc protected int ignoreAbove = Defaults.IGNORE_ABOVE; public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index f7910e1f5af..0bbe2fe8f1b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -94,7 +94,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected Boolean ignoreMalformed; public Builder(String name, GeoPointFieldType fieldType) { - super(name, fieldType); + super(name, fieldType, fieldType); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 7c100a306c2..a99517f4003 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -120,7 +120,7 @@ public class GeoShapeFieldMapper extends FieldMapper { private Boolean coerce; public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 4676c63e793..bcd094d2ae6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -101,7 +101,7 @@ public class AllFieldMapper extends MetadataFieldMapper { private EnabledAttributeMapper enabled = Defaults.ENABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); builder = this; indexName = Defaults.INDEX_NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 7883415e59a..e03439f3f54 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -78,7 +78,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { private boolean enabled = Defaults.ENABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index a0b7cddae76..0fe3e10bcb8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -89,7 +89,7 @@ public class IdFieldMapper extends MetadataFieldMapper { private String path = Defaults.PATH; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 167807f3b2a..dbbf03b72e2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -79,7 +79,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 6142bf475ec..65daef2a834 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -97,7 +97,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); public Builder(String documentType) { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); this.indexName = name; this.documentType = documentType; builder = this; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index e791ad376c3..40b7e6871c4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -77,7 +77,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { private String path = Defaults.PATH; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); } public Builder required(boolean required) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 4d47c3bf446..40bf9eb0c8e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -88,7 +88,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { private String[] excludes = null; public Builder() { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } public Builder enabled(boolean enabled) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 7a17e56e7dd..f99ca18600a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -78,7 +78,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { private long defaultTTL = Defaults.DEFAULT; public Builder() { - super(Defaults.NAME, Defaults.TTL_FIELD_TYPE); + super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE); } public Builder enabled(EnabledAttributeMapper enabled) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 0657d67857b..b0606f1994f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -96,7 +96,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { private Boolean ignoreMissing = null; public 
Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); if (existing != null) { // if there is an existing type, always use that store value (only matters for < 2.0) explicitStore = true; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index a140593943f..c529db5183e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -80,7 +80,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index 1cf3b9d9ac3..10f9880d97d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -78,7 +78,7 @@ public class UidFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index d9659f40c22..6b1471afda7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -61,7 +61,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder() { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index e7b7c26b84f..6f7541a272a 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -81,7 +81,7 @@ public class ExternalMapper extends FieldMapper { private String mapperName; public Builder(String name, String generatedValue, String mapperName) { - super(name, new ExternalFieldType()); + super(name, new ExternalFieldType(), new ExternalFieldType()); this.builder = this; this.stringBuilder = stringField(name).store(false); this.generatedValue = generatedValue; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java index 2731e30a84e..8bdb5670dbb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java @@ -98,7 +98,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { protected Builder() { - super(CONTENT_TYPE, FIELD_TYPE); + super(CONTENT_TYPE, FIELD_TYPE, FIELD_TYPE); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index 3f3c5702e8c..f97b22e0ecb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -204,7 +204,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { @Override public Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return new MetadataFieldMapper.Builder("_dummy", FIELD_TYPE) { + return new MetadataFieldMapper.Builder("_dummy", FIELD_TYPE, FIELD_TYPE) { @Override public DummyMetadataFieldMapper build(BuilderContext context) { return new DummyMetadataFieldMapper(context.indexSettings()); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 66ecbe0850b..ffae8205e33 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ 
b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -131,7 +131,7 @@ public class AttachmentMapper extends FieldMapper { private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE); public Builder(String name) { - super(name, new AttachmentFieldType()); + super(name, new AttachmentFieldType(), new AttachmentFieldType()); this.builder = this; this.contentBuilder = stringField(FieldNames.CONTENT); } diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index fb5d47bdf69..1e27e18bac7 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -66,7 +66,7 @@ public class SizeFieldMapper extends MetadataFieldMapper { protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder(MappedFieldType existing) { - super(NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing); + super(NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing, Defaults.SIZE_FIELD_TYPE); builder = this; } From 89a09b9bedc7665abc9b6c5dcadb3f487a6626de Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Dec 2015 21:45:47 -0500 Subject: [PATCH 065/322] Master should wait on cluster state publication when failing a shard When a client sends a request to fail a shard to the master, the current behavior is that the master will submit the cluster state update task and then immediately send a successful response back to the client; additionally, if there are any failures while processing the cluster state update task to fail the shard, then the client will never be notified of these failures. This commit modifies the master behavior when handling requests to fail a shard. In particular, the master will now wait until successful publication of the cluster state update before notifying the request client that the shard is marked as failed; additionally, the client is now notified of any failures during the execution of the cluster state update task. 
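In outline, the change moves the acknowledgement from submission time to publication time: the transport handler no longer answers the channel as soon as the task is submitted, it hands the channel over to the cluster state task listener and answers from the listener callbacks. A condensed sketch of that shape, using hypothetical stand-in types rather than the Elasticsearch classes in the diff below:

    import java.util.function.Consumer;

    // Hypothetical stand-ins for the transport channel and the task listener.
    interface Channel {
        void sendOk();
        void sendError(Throwable t);
    }

    interface StateUpdateListener {
        void onProcessed();          // the new cluster state was committed and published
        void onFailure(Throwable t); // the update task failed; the client must hear about it
    }

    final class FailShardHandler {
        private final Consumer<StateUpdateListener> submitStateUpdate;

        FailShardHandler(Consumer<StateUpdateListener> submitStateUpdate) {
            this.submitStateUpdate = submitStateUpdate;
        }

        // Before this patch: submit the task, then immediately acknowledge.
        // After this patch: acknowledge (or report the failure) only when the listener fires.
        void handle(Channel channel) {
            submitStateUpdate.accept(new StateUpdateListener() {
                @Override public void onProcessed() { channel.sendOk(); }
                @Override public void onFailure(Throwable t) { channel.sendError(t); }
            });
        }
    }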
Relates #14252 --- .../action/shard/ShardStateAction.java | 73 +++++++++++++------ 1 file changed, 51 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index c2ac791aa16..3a01ced6ebf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; @@ -53,6 +54,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; @@ -113,7 +115,7 @@ public class ShardStateAction extends AbstractComponent { @Override public void handleException(TransportException exp) { - logger.warn("failed to send failed shard to {}", exp, masterNode); + logger.warn("unexpected failure while sending request to [{}] to fail shard [{}]", exp, masterNode, shardRoutingEntry); listener.onShardFailedFailure(masterNode, exp); } }); @@ -122,22 +124,62 @@ public class ShardStateAction extends AbstractComponent { private class ShardFailedTransportHandler implements TransportRequestHandler { @Override public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - handleShardFailureOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); + handleShardFailureOnMaster(request, new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Throwable t) { + logger.error("unexpected failure while failing shard [{}]", t, request.shardRouting); + try { + channel.sendResponse(t); + } catch (Throwable channelThrowable) { + logger.warn("failed to send failure [{}] while failing shard [{}]", channelThrowable, t, request.shardRouting); + } + } + + @Override + public void onNoLongerMaster(String source) { + logger.error("no longer master while failing shard [{}]", request.shardRouting); + try { + channel.sendResponse(new NotMasterException(source)); + } catch (Throwable channelThrowable) { + logger.warn("failed to send no longer master while failing shard [{}]", channelThrowable, request.shardRouting); + } + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + try { + int numberOfUnassignedShards = newState.getRoutingNodes().unassigned().size(); + if (oldState != newState && numberOfUnassignedShards > 0) { + String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shard [%s]", numberOfUnassignedShards, request.shardRouting); + if (logger.isTraceEnabled()) { + logger.trace(reason + ", scheduling a reroute"); + } + routingService.reroute(reason); + } + } finally { + try { + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } catch (Throwable channelThrowable) { + logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting); + } 
+ } + } + } + ); } } - class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder batchResultBuilder = BatchResult.builder(); - List shardRoutingsToBeApplied = new ArrayList<>(tasks.size()); + List failedShards = new ArrayList<>(tasks.size()); for (ShardRoutingEntry task : tasks) { - shardRoutingsToBeApplied.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); + failedShards.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); } ClusterState maybeUpdatedState = currentState; try { - RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, shardRoutingsToBeApplied); + RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, failedShards); if (result.changed()) { maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build(); } @@ -147,31 +189,18 @@ public class ShardStateAction extends AbstractComponent { } return batchResultBuilder.build(maybeUpdatedState); } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - if (oldState != newState && newState.getRoutingNodes().unassigned().size() > 0) { - logger.trace("unassigned shards after shard failures. scheduling a reroute."); - routingService.reroute("unassigned shards after shard failures, scheduling a reroute"); - } - } - - @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure during [{}]", t, source); - } } private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); - private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry) { + private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry, ClusterStateTaskListener listener) { logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); clusterService.submitStateUpdateTask( "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", shardRoutingEntry, ClusterStateTaskConfig.build(Priority.HIGH), shardFailedClusterStateHandler, - shardFailedClusterStateHandler); + listener); } public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason) { From 02354fc491c2767448b21150b6ed0e58dfefb322 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 10:53:59 -0500 Subject: [PATCH 066/322] More useful assertions in o.e.c.s.InternalClusterService --- .../cluster/service/InternalClusterService.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index afdfea65328..b8c898a31e9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -72,6 +72,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Queue; import 
java.util.concurrent.ConcurrentMap; @@ -442,9 +443,15 @@ public class InternalClusterService extends AbstractLifecycleComponent finalBatchResult = batchResult; - assert toExecute.stream().map(updateTask -> updateTask.task).allMatch(finalBatchResult.executionResults::containsKey); + assert batchResult.executionResults.size() == toExecute.size() + : String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", toExecute.size(), toExecute.size() == 1 ? "" : "s", batchResult.executionResults.size()); + boolean assertsEnabled = false; + assert (assertsEnabled = true); + if (assertsEnabled) { + for (UpdateTask updateTask : toExecute) { + assert batchResult.executionResults.containsKey(updateTask.task) : "missing task result for [" + updateTask.task + "]"; + } + } ClusterState newClusterState = batchResult.resultingState; final ArrayList> proccessedListeners = new ArrayList<>(); From 99f9bd7cfe0191214dc95a02d817874e1ecafa5b Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 16 Dec 2015 14:40:16 +0100 Subject: [PATCH 067/322] Throw exception when trying to write map with null keys Closes #14346 --- .../common/xcontent/XContentBuilder.java | 28 ++++----------- .../builder/XContentBuilderTests.java | 34 ++++++++++++++++--- 2 files changed, 36 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index af8e7534692..d26485a121d 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -53,7 +53,7 @@ import java.util.Map; */ public final class XContentBuilder implements BytesStream, Releasable { - public static enum FieldCaseConversion { + public enum FieldCaseConversion { /** * No conversion will occur. 
*/ @@ -251,14 +251,7 @@ public final class XContentBuilder implements BytesStream, Releasable { } public XContentBuilder field(XContentBuilderString name) throws IOException { - if (fieldCaseConversion == FieldCaseConversion.UNDERSCORE) { - generator.writeFieldName(name.underscore()); - } else if (fieldCaseConversion == FieldCaseConversion.CAMELCASE) { - generator.writeFieldName(name.camelCase()); - } else { - generator.writeFieldName(name.underscore()); - } - return this; + return field(name, fieldCaseConversion); } public XContentBuilder field(XContentBuilderString name, FieldCaseConversion conversion) throws IOException { @@ -273,22 +266,13 @@ public final class XContentBuilder implements BytesStream, Releasable { } public XContentBuilder field(String name) throws IOException { - if (fieldCaseConversion == FieldCaseConversion.UNDERSCORE) { - if (cachedStringBuilder == null) { - cachedStringBuilder = new StringBuilder(); - } - name = Strings.toUnderscoreCase(name, cachedStringBuilder); - } else if (fieldCaseConversion == FieldCaseConversion.CAMELCASE) { - if (cachedStringBuilder == null) { - cachedStringBuilder = new StringBuilder(); - } - name = Strings.toCamelCase(name, cachedStringBuilder); - } - generator.writeFieldName(name); - return this; + return field(name, fieldCaseConversion); } public XContentBuilder field(String name, FieldCaseConversion conversion) throws IOException { + if (name == null) { + throw new IllegalArgumentException("field name cannot be null"); + } if (conversion == FieldCaseConversion.UNDERSCORE) { if (cachedStringBuilder == null) { cachedStringBuilder = new StringBuilder(); diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index 7ffafc004ab..9129e3c05b3 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent.builder; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.FastCharArrayWriter; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,6 +38,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; +import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; @@ -51,9 +51,6 @@ import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConvers import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.UNDERSCORE; import static org.hamcrest.Matchers.equalTo; -/** - * - */ public class XContentBuilderTests extends ESTestCase { public void testPrettyWithLfAtEnd() throws Exception { ByteArrayOutputStream os = new ByteArrayOutputStream(); @@ -350,4 +347,33 @@ public class XContentBuilderTests extends ESTestCase { "}", string.trim()); } + public void testWriteMapWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.map(Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + 
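 // the null key trips the field-name null check added above in field(String, FieldCaseConversion) + 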
assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } + + public void testWriteMapValueWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.value(Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } + + public void testWriteFieldMapWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.field("map", Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } } From db357f078aa1f817fc4078277e0a49b35079281f Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Wed, 16 Dec 2015 16:36:50 +0100 Subject: [PATCH 068/322] Merge pull request #15396 from brwe/java-api-for-synced-flush Add java API for synced flush closes #12812 --- .../elasticsearch/action/ActionModule.java | 3 + .../indices/flush/SyncedFlushAction.java | 44 +++++ .../indices/flush/SyncedFlushRequest.java | 64 +++++++ .../flush/SyncedFlushRequestBuilder.java | 41 +++++ .../indices/flush/SyncedFlushResponse.java} | 97 +++++++++-- .../flush/TransportSyncedFlushAction.java | 52 ++++++ .../client/IndicesAdminClient.java | 30 +++- .../org/elasticsearch/client/Requests.java | 14 +- .../client/support/AbstractClient.java | 19 +++ .../flush/ShardsSyncedFlushResult.java | 56 +++++- .../indices/flush/SyncedFlushService.java | 86 +++++----- .../indices/flush/RestSyncedFlushAction.java | 17 +- .../indices/flush/SyncedFlushUnitTests.java | 84 +++++++-- .../gateway/RecoveryFromGatewayIT.java | 2 +- .../gateway/ReusePeerRecoverySharedTest.java | 161 ++++++++++++++++++ .../elasticsearch/indices/flush/FlushIT.java | 9 +- .../flush/SyncedFlushSingleNodeTests.java | 4 +- .../indices/flush/SyncedFlushUtil.java | 24 +-- .../elasticsearch/test/ESIntegTestCase.java | 19 +-- 19 files changed, 696 insertions(+), 130 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java rename core/src/main/java/org/elasticsearch/{indices/flush/IndicesSyncedFlushResult.java => action/admin/indices/flush/SyncedFlushResponse.java} (64%) create mode 100644 core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java rename core/src/test/java/org/elasticsearch/{ => action/admin}/indices/flush/SyncedFlushUnitTests.java (51%) create mode 100644 core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 88ccb809712..adcb873e838 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -107,6 +107,8 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import 
org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; +import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; @@ -293,6 +295,7 @@ public class ActionModule extends AbstractModule { registerAction(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class); registerAction(RefreshAction.INSTANCE, TransportRefreshAction.class); registerAction(FlushAction.INSTANCE, TransportFlushAction.class); + registerAction(SyncedFlushAction.INSTANCE, TransportSyncedFlushAction.class); registerAction(ForceMergeAction.INSTANCE, TransportForceMergeAction.class); registerAction(UpgradeAction.INSTANCE, TransportUpgradeAction.class); registerAction(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java new file mode 100644 index 00000000000..291fd49c63a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + + +public class SyncedFlushAction extends Action { + + public static final SyncedFlushAction INSTANCE = new SyncedFlushAction(); + public static final String NAME = "indices:admin/synced_flush"; + + private SyncedFlushAction() { + super(NAME); + } + + @Override + public SyncedFlushResponse newResponse() { + return new SyncedFlushResponse(); + } + + @Override + public SyncedFlushRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new SyncedFlushRequestBuilder(client, this); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java new file mode 100644 index 00000000000..59719fe8877 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; + +import java.util.Arrays; + +/** + * A synced flush request to sync flush one or more indices. The synced flush process of an index performs a flush + * and writes the same sync id to primary and all copies. + * + *
 

Best created with {@link org.elasticsearch.client.Requests#syncedFlushRequest(String...)}.

+ * + * @see org.elasticsearch.client.Requests#flushRequest(String...) + * @see org.elasticsearch.client.IndicesAdminClient#syncedFlush(SyncedFlushRequest) + * @see SyncedFlushResponse + */ +public class SyncedFlushRequest extends BroadcastRequest { + + public SyncedFlushRequest() { + } + + /** + * Copy constructor that creates a new synced flush request that is a copy of the one provided as an argument. + * The new request will inherit though headers and context from the original request that caused it. + */ + public SyncedFlushRequest(ActionRequest originalRequest) { + super(originalRequest); + } + + /** + * Constructs a new synced flush request against one or more indices. If nothing is provided, all indices will + * be sync flushed. + */ + public SyncedFlushRequest(String... indices) { + super(indices); + } + + + @Override + public String toString() { + return "SyncedFlushRequest{" + + "indices=" + Arrays.toString(indices) + "}"; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java new file mode 100644 index 00000000000..9e407260811 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.ElasticsearchClient; + +public class SyncedFlushRequestBuilder extends ActionRequestBuilder { + + public SyncedFlushRequestBuilder(ElasticsearchClient client, SyncedFlushAction action) { + super(client, action, new SyncedFlushRequest()); + } + + public SyncedFlushRequestBuilder setIndices(String[] indices) { + super.request().indices(indices); + return this; + } + + public SyncedFlushRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { + super.request().indicesOptions(indicesOptions); + return this; + } +} diff --git a/core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java similarity index 64% rename from core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java rename to core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java index 435c0d138cd..5925370e5f7 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java @@ -16,16 +16,25 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.indices.flush; +package org.elasticsearch.action.admin.indices.flush; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.indices.flush.ShardsSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -34,13 +43,16 @@ import static java.util.Collections.unmodifiableMap; /** * The result of performing a sync flush operation on all shards of multiple indices */ -public class IndicesSyncedFlushResult implements ToXContent { +public class SyncedFlushResponse extends ActionResponse implements ToXContent { - final Map> shardsResultPerIndex; - final ShardCounts shardCounts; + Map> shardsResultPerIndex; + ShardCounts shardCounts; + SyncedFlushResponse() { - public IndicesSyncedFlushResult(Map> shardsResultPerIndex) { + } + + public SyncedFlushResponse(Map> shardsResultPerIndex) { // shardsResultPerIndex is never modified after it is passed to this // constructor so this is safe even though shardsResultPerIndex is a // ConcurrentHashMap @@ -48,17 +60,23 @@ public class IndicesSyncedFlushResult implements ToXContent { this.shardCounts = calculateShardCounts(Iterables.flatten(shardsResultPerIndex.values())); } - /** total number shards, including replicas, both assigned and unassigned */ + /** + * total number shards, including replicas, both assigned and unassigned + */ public int totalShards() { return shardCounts.total; } - /** total number of shards for 
which the operation failed */ + /** + * total number of shards for which the operation failed + */ public int failedShards() { return shardCounts.failed; } - /** total number of shards which were successfully sync-flushed */ + /** + * total number of shards which were successfully sync-flushed + */ public int successfulShards() { return shardCounts.successful; } @@ -91,8 +109,8 @@ public class IndicesSyncedFlushResult implements ToXContent { builder.endObject(); continue; } - Map failedShards = shardResults.failedShards(); - for (Map.Entry shardEntry : failedShards.entrySet()) { + Map failedShards = shardResults.failedShards(); + for (Map.Entry shardEntry : failedShards.entrySet()) { builder.startObject(); builder.field(Fields.SHARD, shardResults.shardId().id()); builder.field(Fields.REASON, shardEntry.getValue().failureReason()); @@ -123,11 +141,11 @@ public class IndicesSyncedFlushResult implements ToXContent { return new ShardCounts(total, successful, failed); } - static final class ShardCounts implements ToXContent { + static final class ShardCounts implements ToXContent, Streamable { - public final int total; - public final int successful; - public final int failed; + public int total; + public int successful; + public int failed; ShardCounts(int total, int successful, int failed) { this.total = total; @@ -135,6 +153,10 @@ public class IndicesSyncedFlushResult implements ToXContent { this.failed = failed; } + ShardCounts() { + + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(Fields.TOTAL, total); @@ -142,6 +164,20 @@ public class IndicesSyncedFlushResult implements ToXContent { builder.field(Fields.FAILED, failed); return builder; } + + @Override + public void readFrom(StreamInput in) throws IOException { + total = in.readInt(); + successful = in.readInt(); + failed = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(total); + out.writeInt(successful); + out.writeInt(failed); + } } static final class Fields { @@ -154,4 +190,37 @@ public class IndicesSyncedFlushResult implements ToXContent { static final XContentBuilderString ROUTING = new XContentBuilderString("routing"); static final XContentBuilderString REASON = new XContentBuilderString("reason"); } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + shardCounts = new ShardCounts(); + shardCounts.readFrom(in); + Map> tmpShardsResultPerIndex = new HashMap<>(); + int numShardsResults = in.readInt(); + for (int i =0 ; i< numShardsResults; i++) { + String index = in.readString(); + List shardsSyncedFlushResults = new ArrayList<>(); + int numShards = in.readInt(); + for (int j =0; j< numShards; j++) { + shardsSyncedFlushResults.add(ShardsSyncedFlushResult.readShardsSyncedFlushResult(in)); + } + tmpShardsResultPerIndex.put(index, shardsSyncedFlushResults); + } + shardsResultPerIndex = Collections.unmodifiableMap(tmpShardsResultPerIndex); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardCounts.writeTo(out); + out.writeInt(shardsResultPerIndex.size()); + for (Map.Entry> entry : shardsResultPerIndex.entrySet()) { + out.writeString(entry.getKey()); + out.writeInt(entry.getValue().size()); + for (ShardsSyncedFlushResult shardsSyncedFlushResult : entry.getValue()) { + shardsSyncedFlushResult.writeTo(out); + } + } + } } diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java new file mode 100644 index 00000000000..3ba354f4629 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.flush.SyncedFlushService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/** + * Synced flush Action. 
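 + * Delegates to {@link org.elasticsearch.indices.flush.SyncedFlushService#attemptSyncedFlush}, which runs the + * synced flush (pre-sync flush, in-flight operations check, sync id write) against every shard copy. 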
+ */ +public class TransportSyncedFlushAction extends HandledTransportAction { + + SyncedFlushService syncedFlushService; + + @Inject + public TransportSyncedFlushAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SyncedFlushService syncedFlushService) { + super(settings, SyncedFlushAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SyncedFlushRequest::new); + this.syncedFlushService = syncedFlushService; + } + + @Override + protected void doExecute(SyncedFlushRequest request, ActionListener listener) { + syncedFlushService.attemptSyncedFlush(request.indices(), request.indicesOptions(), listener); + } +} diff --git a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 15def3b273e..73eed43352b 100644 --- a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -53,8 +53,8 @@ import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; @@ -82,11 +82,14 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder; -import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; @@ -390,6 +393,29 @@ public interface IndicesAdminClient extends ElasticsearchClient { */ FlushRequestBuilder prepareFlush(String... indices); + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). 
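 + *

A minimal usage sketch ({@code client} here is assumed to be any connected {@link Client}): + *

+     * SyncedFlushResponse response =
+     *         client.admin().indices().syncedFlush(Requests.syncedFlushRequest("index1")).actionGet();
+     * 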
+ * + * @param request The sync flush request + * @return A result future + * @see org.elasticsearch.client.Requests#syncedFlushRequest(String...) + */ + ActionFuture syncedFlush(SyncedFlushRequest request); + + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). + * + * @param request The sync flush request + * @param listener A listener to be notified with a result + * @see org.elasticsearch.client.Requests#syncedFlushRequest(String...) + */ + void syncedFlush(SyncedFlushRequest request, ActionListener listener); + + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). + */ + SyncedFlushRequestBuilder prepareSyncedFlush(String... indices); + /** * Explicitly force merge one or more indices into a the number of segments. * diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java index 7f0decaba52..063fd10dcfc 100644 --- a/core/src/main/java/org/elasticsearch/client/Requests.java +++ b/core/src/main/java/org/elasticsearch/client/Requests.java @@ -50,6 +50,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -131,7 +132,7 @@ public class Requests { public static SuggestRequest suggestRequest(String... indices) { return new SuggestRequest(indices); } - + /** * Creates a search request against one or more indices. Note, the search source must be set either using the * actual JSON search source, or the {@link org.elasticsearch.search.builder.SearchSourceBuilder}. @@ -265,6 +266,17 @@ public class Requests { return new FlushRequest(indices); } + /** + * Creates a synced flush indices request. + * + * @param indices The indices to sync flush. Use null or _all to execute against all indices + * @return The synced flush request + * @see org.elasticsearch.client.IndicesAdminClient#syncedFlush(SyncedFlushRequest) + */ + public static SyncedFlushRequest syncedFlushRequest(String... indices) { + return new SyncedFlushRequest(indices); + } + /** * Creates a force merge request. 
* diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 1b5e8539ac6..ea57901f2b3 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -188,6 +188,10 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; @@ -1315,6 +1319,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new FlushRequestBuilder(this, FlushAction.INSTANCE).setIndices(indices); } + @Override + public ActionFuture syncedFlush(SyncedFlushRequest request) { + return execute(SyncedFlushAction.INSTANCE, request); + } + + @Override + public void syncedFlush(SyncedFlushRequest request, ActionListener listener) { + execute(SyncedFlushAction.INSTANCE, request, listener); + } + + @Override + public SyncedFlushRequestBuilder prepareSyncedFlush(String... 
indices) { + return new SyncedFlushRequestBuilder(this, SyncedFlushAction.INSTANCE).setIndices(indices); + } + @Override public void getMappings(GetMappingsRequest request, ActionListener listener) { execute(GetMappingsAction.INSTANCE, request, listener); diff --git a/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java b/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java index f7ae5f94b96..220ce9120e9 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java +++ b/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java @@ -19,8 +19,12 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.index.shard.ShardId; +import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -30,15 +34,15 @@ import static java.util.Collections.unmodifiableMap; /** * Result for all copies of a shard */ -public class ShardsSyncedFlushResult { +public class ShardsSyncedFlushResult implements Streamable { private String failureReason; - private Map shardResponses; + private Map shardResponses; private String syncId; private ShardId shardId; // some shards may be unassigned, so we need this as state private int totalShards; - public ShardsSyncedFlushResult() { + private ShardsSyncedFlushResult() { } public ShardId getShardId() { @@ -59,7 +63,7 @@ public class ShardsSyncedFlushResult { /** * success constructor */ - public ShardsSyncedFlushResult(ShardId shardId, String syncId, int totalShards, Map shardResponses) { + public ShardsSyncedFlushResult(ShardId shardId, String syncId, int totalShards, Map shardResponses) { this.failureReason = null; this.shardResponses = unmodifiableMap(new HashMap<>(shardResponses)); this.syncId = syncId; @@ -98,7 +102,7 @@ public class ShardsSyncedFlushResult { */ public int successfulShards() { int i = 0; - for (SyncedFlushService.SyncedFlushResponse result : shardResponses.values()) { + for (SyncedFlushService.ShardSyncedFlushResponse result : shardResponses.values()) { if (result.success()) { i++; } @@ -109,9 +113,9 @@ public class ShardsSyncedFlushResult { /** * @return an array of shard failures */ - public Map failedShards() { - Map failures = new HashMap<>(); - for (Map.Entry result : shardResponses.entrySet()) { + public Map failedShards() { + Map failures = new HashMap<>(); + for (Map.Entry result : shardResponses.entrySet()) { if (result.getValue().success() == false) { failures.put(result.getKey(), result.getValue()); } @@ -123,11 +127,45 @@ public class ShardsSyncedFlushResult { * @return Individual responses for each shard copy with a detailed failure message if the copy failed to perform the synced flush. * Empty if synced flush failed before step three. 
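 A minimal round-trip sketch for the Streamable support added below (mirroring testResponseStreaming further down; result stands for any populated ShardsSyncedFlushResult): BytesStreamOutput out = new BytesStreamOutput(); result.writeTo(out); ShardsSyncedFlushResult read = ShardsSyncedFlushResult.readShardsSyncedFlushResult(StreamInput.wrap(out.bytes())); 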
*/ - public Map shardResponses() { + public Map shardResponses() { return shardResponses; } public ShardId shardId() { return shardId; } + + @Override + public void readFrom(StreamInput in) throws IOException { + failureReason = in.readOptionalString(); + int numResponses = in.readInt(); + shardResponses = new HashMap<>(); + for (int i = 0; i < numResponses; i++) { + ShardRouting shardRouting = ShardRouting.readShardRoutingEntry(in); + SyncedFlushService.ShardSyncedFlushResponse response = SyncedFlushService.ShardSyncedFlushResponse.readSyncedFlushResponse(in); + shardResponses.put(shardRouting, response); + } + syncId = in.readOptionalString(); + shardId = ShardId.readShardId(in); + totalShards = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(failureReason); + out.writeInt(shardResponses.size()); + for (Map.Entry entry : shardResponses.entrySet()) { + entry.getKey().writeTo(out); + entry.getValue().writeTo(out); + } + out.writeOptionalString(syncId); + shardId.writeTo(out); + out.writeInt(totalShards); + } + + public static ShardsSyncedFlushResult readShardsSyncedFlushResult(StreamInput in) throws IOException { + ShardsSyncedFlushResult shardsSyncedFlushResult = new ShardsSyncedFlushResult(); + shardsSyncedFlushResult.readFrom(in); + return shardsSyncedFlushResult; + } } diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index ad264c2ac05..0918ad2afee 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -21,6 +21,7 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -81,9 +82,8 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; - - transportService.registerRequestHandler(PRE_SYNCED_FLUSH_ACTION_NAME, PreSyncedFlushRequest::new, ThreadPool.Names.FLUSH, new PreSyncedFlushTransportHandler()); - transportService.registerRequestHandler(SYNCED_FLUSH_ACTION_NAME, SyncedFlushRequest::new, ThreadPool.Names.FLUSH, new SyncedFlushTransportHandler()); + transportService.registerRequestHandler(PRE_SYNCED_FLUSH_ACTION_NAME, PreShardSyncedFlushRequest::new, ThreadPool.Names.FLUSH, new PreSyncedFlushTransportHandler()); + transportService.registerRequestHandler(SYNCED_FLUSH_ACTION_NAME, ShardSyncedFlushRequest::new, ThreadPool.Names.FLUSH, new SyncedFlushTransportHandler()); transportService.registerRequestHandler(IN_FLIGHT_OPS_ACTION_NAME, InFlightOpsRequest::new, ThreadPool.Names.SAME, new InFlightOpCountTransportHandler()); } @@ -109,7 +109,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL * a utility method to perform a synced flush for all shards of multiple indices. see {@link #attemptSyncedFlush(ShardId, ActionListener)} * for more details. 
*/ - public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { + public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { final ClusterState state = clusterService.state(); final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, indicesOptions, aliasesOrIndices); final Map> results = ConcurrentCollections.newConcurrentMap(); @@ -123,7 +123,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } if (numberOfShards == 0) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); return; } final int finalTotalNumberOfShards = totalNumberOfShards; @@ -138,7 +138,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL public void onResponse(ShardsSyncedFlushResult syncedFlushResult) { results.get(index).add(syncedFlushResult); if (countDown.countDown()) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); } } @@ -147,7 +147,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL logger.debug("{} unexpected error while executing synced flush", shardId); results.get(index).add(new ShardsSyncedFlushResult(shardId, finalTotalNumberOfShards, e.getMessage())); if (countDown.countDown()) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); } } }); @@ -297,33 +297,33 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL void sendSyncRequests(final String syncId, final List shards, ClusterState state, Map expectedCommitIds, final ShardId shardId, final int totalShards, final ActionListener listener) { final CountDown countDown = new CountDown(shards.size()); - final Map results = ConcurrentCollections.newConcurrentMap(); + final Map results = ConcurrentCollections.newConcurrentMap(); for (final ShardRouting shard : shards) { final DiscoveryNode node = state.nodes().get(shard.currentNodeId()); if (node == null) { logger.trace("{} is assigned to an unknown node. skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); - results.put(shard, new SyncedFlushResponse("unknown node")); + results.put(shard, new ShardSyncedFlushResponse("unknown node")); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } final Engine.CommitId expectedCommitId = expectedCommitIds.get(shard.currentNodeId()); if (expectedCommitId == null) { logger.trace("{} can't resolve expected commit id for {}, skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); - results.put(shard, new SyncedFlushResponse("no commit id from pre-sync flush")); + results.put(shard, new ShardSyncedFlushResponse("no commit id from pre-sync flush")); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } logger.trace("{} sending synced flush request to {}. 
sync id [{}].", shardId, shard, syncId); - transportService.sendRequest(node, SYNCED_FLUSH_ACTION_NAME, new SyncedFlushRequest(shard.shardId(), syncId, expectedCommitId), - new BaseTransportResponseHandler() { + transportService.sendRequest(node, SYNCED_FLUSH_ACTION_NAME, new ShardSyncedFlushRequest(shard.shardId(), syncId, expectedCommitId), + new BaseTransportResponseHandler() { @Override - public SyncedFlushResponse newInstance() { - return new SyncedFlushResponse(); + public ShardSyncedFlushResponse newInstance() { + return new ShardSyncedFlushResponse(); } @Override - public void handleResponse(SyncedFlushResponse response) { - SyncedFlushResponse existing = results.put(shard, response); + public void handleResponse(ShardSyncedFlushResponse response) { + ShardSyncedFlushResponse existing = results.put(shard, response); assert existing == null : "got two answers for node [" + node + "]"; // count after the assert so we won't decrement twice in handleException contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); @@ -332,7 +332,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public void handleException(TransportException exp) { logger.trace("{} error while performing synced flush on [{}], skipping", exp, shardId, shard); - results.put(shard, new SyncedFlushResponse(exp.getMessage())); + results.put(shard, new ShardSyncedFlushResponse(exp.getMessage())); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); } @@ -346,7 +346,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } private void contDownAndSendResponseIfDone(String syncId, List shards, ShardId shardId, int totalShards, - ActionListener listener, CountDown countDown, Map results) { + ActionListener listener, CountDown countDown, Map results) { if (countDown.countDown()) { assert results.size() == shards.size(); listener.onResponse(new ShardsSyncedFlushResult(shardId, syncId, totalShards, results)); @@ -369,7 +369,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } continue; } - transportService.sendRequest(node, PRE_SYNCED_FLUSH_ACTION_NAME, new PreSyncedFlushRequest(shard.shardId()), new BaseTransportResponseHandler() { + transportService.sendRequest(node, PRE_SYNCED_FLUSH_ACTION_NAME, new PreShardSyncedFlushRequest(shard.shardId()), new BaseTransportResponseHandler() { @Override public PreSyncedFlushResponse newInstance() { return new PreSyncedFlushResponse(); @@ -401,7 +401,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - private PreSyncedFlushResponse performPreSyncedFlush(PreSyncedFlushRequest request) { + private PreSyncedFlushResponse performPreSyncedFlush(PreShardSyncedFlushRequest request) { IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); FlushRequest flushRequest = new FlushRequest().force(false).waitIfOngoing(true); logger.trace("{} performing pre sync flush", request.shardId()); @@ -410,7 +410,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL return new PreSyncedFlushResponse(commitId); } - private SyncedFlushResponse performSyncedFlush(SyncedFlushRequest request) { + private ShardSyncedFlushResponse performSyncedFlush(ShardSyncedFlushRequest request) { IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); IndexShard indexShard 
= indexService.getShard(request.shardId().id()); logger.trace("{} performing sync flush. sync id [{}], expected commit id {}", request.shardId(), request.syncId(), request.expectedCommitId()); @@ -418,11 +418,11 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL logger.trace("{} sync flush done. sync id [{}], result [{}]", request.shardId(), request.syncId(), result); switch (result) { case SUCCESS: - return new SyncedFlushResponse(); + return new ShardSyncedFlushResponse(); case COMMIT_MISMATCH: - return new SyncedFlushResponse("commit has changed"); + return new ShardSyncedFlushResponse("commit has changed"); case PENDING_OPERATIONS: - return new SyncedFlushResponse("pending operations"); + return new ShardSyncedFlushResponse("pending operations"); default: throw new ElasticsearchException("unknown synced flush result [" + result + "]"); } @@ -439,19 +439,19 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL return new InFlightOpsResponse(opCount); } - public final static class PreSyncedFlushRequest extends TransportRequest { + public final static class PreShardSyncedFlushRequest extends TransportRequest { private ShardId shardId; - public PreSyncedFlushRequest() { + public PreShardSyncedFlushRequest() { } - public PreSyncedFlushRequest(ShardId shardId) { + public PreShardSyncedFlushRequest(ShardId shardId) { this.shardId = shardId; } @Override public String toString() { - return "PreSyncedFlushRequest{" + + return "PreShardSyncedFlushRequest{" + "shardId=" + shardId + '}'; } @@ -504,16 +504,16 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - public static final class SyncedFlushRequest extends TransportRequest { + public static final class ShardSyncedFlushRequest extends TransportRequest { private String syncId; private Engine.CommitId expectedCommitId; private ShardId shardId; - public SyncedFlushRequest() { + public ShardSyncedFlushRequest() { } - public SyncedFlushRequest(ShardId shardId, String syncId, Engine.CommitId expectedCommitId) { + public ShardSyncedFlushRequest(ShardId shardId, String syncId, Engine.CommitId expectedCommitId) { this.expectedCommitId = expectedCommitId; this.shardId = shardId; this.syncId = syncId; @@ -549,7 +549,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public String toString() { - return "SyncedFlushRequest{" + + return "ShardSyncedFlushRequest{" + "shardId=" + shardId + ",syncId='" + syncId + '\'' + '}'; @@ -559,18 +559,18 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL /** * Response for third step of synced flush (writing the sync id) for one shard copy */ - public static final class SyncedFlushResponse extends TransportResponse { + public static final class ShardSyncedFlushResponse extends TransportResponse { /** * a non null value indicates a failure to sync flush. 
null means success */ String failureReason; - public SyncedFlushResponse() { + public ShardSyncedFlushResponse() { failureReason = null; } - public SyncedFlushResponse(String failureReason) { + public ShardSyncedFlushResponse(String failureReason) { this.failureReason = failureReason; } @@ -596,11 +596,17 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public String toString() { - return "SyncedFlushResponse{" + + return "ShardSyncedFlushResponse{" + "success=" + success() + ", failureReason='" + failureReason + '\'' + '}'; } + + public static ShardSyncedFlushResponse readSyncedFlushResponse(StreamInput in) throws IOException { + ShardSyncedFlushResponse shardSyncedFlushResponse = new ShardSyncedFlushResponse(); + shardSyncedFlushResponse.readFrom(in); + return shardSyncedFlushResponse; + } } @@ -677,18 +683,18 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - private final class PreSyncedFlushTransportHandler implements TransportRequestHandler { + private final class PreSyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(PreSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel) throws Exception { channel.sendResponse(performPreSyncedFlush(request)); } } - private final class SyncedFlushTransportHandler implements TransportRequestHandler { + private final class SyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(SyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel) throws Exception { channel.sendResponse(performSyncedFlush(request)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index 9a3f844abb1..0b8ffcf94da 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -19,14 +19,14 @@ package org.elasticsearch.rest.action.admin.indices.flush; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.indices.flush.IndicesSyncedFlushResult; -import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestBuilderListener; @@ -38,12 +38,9 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSyncedFlushAction extends BaseRestHandler { - private final SyncedFlushService syncedFlushService; - @Inject - public RestSyncedFlushAction(Settings settings, RestController controller, Client client, SyncedFlushService syncedFlushService) { + public RestSyncedFlushAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); - 
this.syncedFlushService = syncedFlushService; controller.registerHandler(POST, "/_flush/synced", this); controller.registerHandler(POST, "/{index}/_flush/synced", this); @@ -53,12 +50,12 @@ public class RestSyncedFlushAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { - String[] indices = Strings.splitStringByCommaToArray(request.param("index")); IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, IndicesOptions.lenientExpandOpen()); - - syncedFlushService.attemptSyncedFlush(indices, indicesOptions, new RestBuilderListener(channel) { + SyncedFlushRequest syncedFlushRequest = new SyncedFlushRequest(Strings.splitStringByCommaToArray(request.param("index"))); + syncedFlushRequest.indicesOptions(indicesOptions); + client.admin().indices().syncedFlush(syncedFlushRequest, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(IndicesSyncedFlushResult results, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(SyncedFlushResponse results, XContentBuilder builder) throws Exception { builder.startObject(); results.toXContent(builder, request); builder.endObject(); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java similarity index 51% rename from core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java rename to core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java index 19cce93c6e4..dfc6ea67c49 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java @@ -17,16 +17,20 @@ * under the License. 
*/ -package org.elasticsearch.indices.flush; +package org.elasticsearch.action.admin.indices.flush; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntMap; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse.ShardCounts; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.flush.IndicesSyncedFlushResult.ShardCounts; -import org.elasticsearch.indices.flush.SyncedFlushService.SyncedFlushResponse; +import org.elasticsearch.indices.flush.ShardsSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -42,14 +46,11 @@ import static org.hamcrest.Matchers.hasSize; public class SyncedFlushUnitTests extends ESTestCase { - private static class TestPlan { - public ShardCounts totalCounts; - public Map countsPerIndex = new HashMap<>(); + public SyncedFlushResponse.ShardCounts totalCounts; + public Map countsPerIndex = new HashMap<>(); public ObjectIntMap expectedFailuresPerIndex = new ObjectIntHashMap<>(); - - public IndicesSyncedFlushResult result; - + public SyncedFlushResponse result; } public void testIndicesSyncedFlushResult() throws IOException { @@ -76,6 +77,56 @@ public class SyncedFlushUnitTests extends ESTestCase { } } + public void testResponseStreaming() throws IOException { + final TestPlan testPlan = createTestPlan(); + assertThat(testPlan.result.totalShards(), equalTo(testPlan.totalCounts.total)); + assertThat(testPlan.result.successfulShards(), equalTo(testPlan.totalCounts.successful)); + assertThat(testPlan.result.failedShards(), equalTo(testPlan.totalCounts.failed)); + assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK)); + BytesStreamOutput out = new BytesStreamOutput(); + testPlan.result.writeTo(out); + StreamInput in = StreamInput.wrap(out.bytes()); + SyncedFlushResponse readResponse = new SyncedFlushResponse(); + readResponse.readFrom(in); + assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total)); + assertThat(readResponse.successfulShards(), equalTo(testPlan.totalCounts.successful)); + assertThat(readResponse.failedShards(), equalTo(testPlan.totalCounts.failed)); + assertThat(readResponse.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? 
RestStatus.CONFLICT : RestStatus.OK)); + assertThat(readResponse.shardsResultPerIndex.size(), equalTo(testPlan.result.getShardsResultPerIndex().size())); + for (Map.Entry> entry : readResponse.getShardsResultPerIndex().entrySet()) { + List originalShardsResults = testPlan.result.getShardsResultPerIndex().get(entry.getKey()); + assertNotNull(originalShardsResults); + List readShardsResults = entry.getValue(); + assertThat(readShardsResults.size(), equalTo(originalShardsResults.size())); + for (int i = 0; i < readShardsResults.size(); i++) { + ShardsSyncedFlushResult originalShardResult = originalShardsResults.get(i); + ShardsSyncedFlushResult readShardResult = readShardsResults.get(i); + assertThat(originalShardResult.failureReason(), equalTo(readShardResult.failureReason())); + assertThat(originalShardResult.failed(), equalTo(readShardResult.failed())); + assertThat(originalShardResult.getShardId(), equalTo(readShardResult.getShardId())); + assertThat(originalShardResult.successfulShards(), equalTo(readShardResult.successfulShards())); + assertThat(originalShardResult.syncId(), equalTo(readShardResult.syncId())); + assertThat(originalShardResult.totalShards(), equalTo(readShardResult.totalShards())); + assertThat(originalShardResult.failedShards().size(), equalTo(readShardResult.failedShards().size())); + for (Map.Entry shardEntry : originalShardResult.failedShards().entrySet()) { + SyncedFlushService.ShardSyncedFlushResponse readShardResponse = readShardResult.failedShards().get(shardEntry.getKey()); + assertNotNull(readShardResponse); + SyncedFlushService.ShardSyncedFlushResponse originalShardResponse = shardEntry.getValue(); + assertThat(originalShardResponse.failureReason(), equalTo(readShardResponse.failureReason())); + assertThat(originalShardResponse.success(), equalTo(readShardResponse.success())); + } + assertThat(originalShardResult.shardResponses().size(), equalTo(readShardResult.shardResponses().size())); + for (Map.Entry shardEntry : originalShardResult.shardResponses().entrySet()) { + SyncedFlushService.ShardSyncedFlushResponse readShardResponse = readShardResult.shardResponses().get(shardEntry.getKey()); + assertNotNull(readShardResponse); + SyncedFlushService.ShardSyncedFlushResponse originalShardResponse = shardEntry.getValue(); + assertThat(originalShardResponse.failureReason(), equalTo(readShardResponse.failureReason())); + assertThat(originalShardResponse.success(), equalTo(readShardResponse.success())); + } + } + } + } + private void assertShardCount(String name, Map header, ShardCounts expectedCounts) { assertThat(name + " has unexpected total count", (Integer) header.get("total"), equalTo(expectedCounts.total)); assertThat(name + " has unexpected successful count", (Integer) header.get("successful"), equalTo(expectedCounts.successful)); @@ -105,32 +156,33 @@ public class SyncedFlushUnitTests extends ESTestCase { failures++; shardsResults.add(new ShardsSyncedFlushResult(shardId, replicas + 1, "simulated total failure")); } else { - Map shardResponses = new HashMap<>(); + Map shardResponses = new HashMap<>(); for (int copy = 0; copy < replicas + 1; copy++) { final ShardRouting shardRouting = TestShardRouting.newShardRouting(index, shard, "node_" + shardId + "_" + copy, null, - copy == 0, ShardRoutingState.STARTED, 0); + copy == 0, ShardRoutingState.STARTED, 0); if (randomInt(5) < 2) { // shard copy failure failed++; failures++; - shardResponses.put(shardRouting, new SyncedFlushResponse("copy failure " + shardId)); + shardResponses.put(shardRouting, new 
SyncedFlushService.ShardSyncedFlushResponse("copy failure " + shardId)); } else { successful++; - shardResponses.put(shardRouting, new SyncedFlushResponse()); + shardResponses.put(shardRouting, new SyncedFlushService.ShardSyncedFlushResponse()); } } shardsResults.add(new ShardsSyncedFlushResult(shardId, "_sync_id_" + shard, replicas + 1, shardResponses)); } } indicesResults.put(index, shardsResults); - testPlan.countsPerIndex.put(index, new ShardCounts(shards * (replicas + 1), successful, failed)); + testPlan.countsPerIndex.put(index, new SyncedFlushResponse.ShardCounts(shards * (replicas + 1), successful, failed)); testPlan.expectedFailuresPerIndex.put(index, failures); totalFailed += failed; totalShards += shards * (replicas + 1); totalSuccesful += successful; } - testPlan.result = new IndicesSyncedFlushResult(indicesResults); - testPlan.totalCounts = new ShardCounts(totalShards, totalSuccesful, totalFailed); + testPlan.result = new SyncedFlushResponse(indicesResults); + testPlan.totalCounts = new SyncedFlushResponse.ShardCounts(totalShards, totalSuccesful, totalFailed); return testPlan; } + } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 01c76b465a9..2bfa885fa79 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -369,7 +369,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { ensureGreen(); } else { logger.info("--> trying to sync flush"); - assertEquals(SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").failedShards(), 0); + assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0); assertSyncIdsNotNull(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java new file mode 100644 index 00000000000..5e16fcebe86 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gateway; + +import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; +import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.indices.flush.SyncedFlushUtil; +import org.elasticsearch.indices.recovery.RecoveryState; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.elasticsearch.test.ESIntegTestCase.internalCluster; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThat; + +/** + * Test of file reuse on recovery shared between integration tests and backwards + * compatibility tests. + */ +public class ReusePeerRecoverySharedTest { + /** + * Test peer reuse on recovery. This is shared between RecoverFromGatewayIT + * and RecoveryBackwardsCompatibilityIT. + * + * @param indexSettings + * settings for the index to test + * @param restartCluster + * runnable that will restart the cluster under test + * @param logger + * logger for logging + * @param useSyncIds + * should this use synced flush? can't use synced from in the bwc + * tests + */ + public static void testCase(Settings indexSettings, Runnable restartCluster, ESLogger logger, boolean useSyncIds) { + /* + * prevent any rebalance actions during the peer recovery if we run into + * a relocation the reuse count will be 0 and this fails the test. We + * are testing here if we reuse the files on disk after full restarts + * for replicas. 
+ */ + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put(indexSettings) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + logger.info("--> indexing docs"); + for (int i = 0; i < 1000; i++) { + client().prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + if ((i % 200) == 0) { + client().admin().indices().prepareFlush().execute().actionGet(); + } + } + if (randomBoolean()) { + client().admin().indices().prepareFlush().execute().actionGet(); + } + logger.info("--> running cluster health"); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + // just wait for merges + client().admin().indices().prepareForceMerge("test").setMaxNumSegments(100).get(); + client().admin().indices().prepareFlush().setWaitIfOngoing(true).setForce(true).get(); + + if (useSyncIds == false) { + logger.info("--> disabling allocation while the cluster is shut down"); + + // Disable allocations while we are closing nodes + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)).get(); + logger.info("--> full cluster restart"); + restartCluster.run(); + + logger.info("--> waiting for cluster to return to green after first shutdown"); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + } else { + logger.info("--> trying to sync flush"); + assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0); + assertSyncIdsNotNull(); + } + + logger.info("--> disabling allocation while the cluster is shut down", useSyncIds ? "" : " a second time"); + // Disable allocations while we are closing nodes + client().admin().cluster().prepareUpdateSettings().setTransientSettings( + settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + .get(); + logger.info("--> full cluster restart"); + restartCluster.run(); + + logger.info("--> waiting for cluster to return to green after {}shutdown", useSyncIds ? 
"" : "second "); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + + if (useSyncIds) { + assertSyncIdsNotNull(); + } + RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); + for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { + long recovered = 0; + for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) { + if (file.name().startsWith("segments")) { + recovered += file.length(); + } + } + if (!recoveryState.getPrimary() && (useSyncIds == false)) { + logger.info("--> replica shard {} recovered from {} to {}, recovered {}, reuse {}", recoveryState.getShardId().getId(), + recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); + assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered)); + assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l)); + // we have to recover the segments file since we commit the translog ID on engine startup + assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(), + equalTo(recoveryState.getIndex().totalBytes() - recovered)); + assertThat("no files should be recovered except of the segments file", recoveryState.getIndex().recoveredFileCount(), + equalTo(1)); + assertThat("all files should be reused except of the segments file", recoveryState.getIndex().reusedFileCount(), + equalTo(recoveryState.getIndex().totalFileCount() - 1)); + assertThat("> 0 files should be reused", recoveryState.getIndex().reusedFileCount(), greaterThan(0)); + } else { + if (useSyncIds && !recoveryState.getPrimary()) { + logger.info("--> replica shard {} recovered from {} to {} using sync id, recovered {}, reuse {}", + recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); + } + assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l)); + assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes())); + assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0)); + assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount())); + } + } + } + + public static void assertSyncIdsNotNull() { + IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); + for (ShardStats shardStats : indexStats.getShards()) { + assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index aa8c9f18c01..9ff0df4d390 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; @@ -99,7 +100,7 @@ public class FlushIT extends ESIntegTestCase { result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId("test", 0)); } else { logger.info("--> sync flushing index [test]"); - IndicesSyncedFlushResult indicesResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); + SyncedFlushResponse indicesResult = client().admin().indices().prepareSyncedFlush("test").get(); result = indicesResult.getShardsResultPerIndex().get("test").get(0); } assertFalse(result.failed()); @@ -171,7 +172,7 @@ public class FlushIT extends ESIntegTestCase { assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); } logger.info("--> trying sync flush"); - IndicesSyncedFlushResult syncedFlushResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); + SyncedFlushResponse syncedFlushResult = client().admin().indices().prepareSyncedFlush("test").get(); logger.info("--> sync flush done"); stop.set(true); indexingThread.join(); @@ -191,7 +192,7 @@ public class FlushIT extends ESIntegTestCase { for (final ShardStats shardStats : shardsStats) { for (final ShardsSyncedFlushResult shardResult : syncedFlushResults) { if (shardStats.getShardRouting().getId() == shardResult.shardId().getId()) { - for (Map.Entry singleResponse : shardResult.shardResponses().entrySet()) { + for (Map.Entry singleResponse : shardResult.shardResponses().entrySet()) { if (singleResponse.getKey().currentNodeId().equals(shardStats.getShardRouting().currentNodeId())) { if (singleResponse.getValue().success()) { logger.info("{} sync flushed on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); @@ -212,7 +213,7 @@ public class FlushIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder().put("index.routing.allocation.include._name", "nonexistent")).get(); // this should not hang but instead immediately return with empty result set - List shardsResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").getShardsResultPerIndex().get("test"); + List shardsResult = client().admin().indices().prepareSyncedFlush("test").get().getShardsResultPerIndex().get("test"); // just to make sure the test actually tests the right thing int numShards = client().admin().indices().prepareGetSettings("test").get().getIndexToSettings().get("test").getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); assertThat(shardsResult.size(), equalTo(numShards)); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 1a4bf8fd3f7..e4c9cb8a7ef 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -98,7 +98,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { assertNotNull(syncedFlushResult); assertEquals(1, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); - SyncedFlushService.SyncedFlushResponse response = syncedFlushResult.shardResponses().values().iterator().next(); + SyncedFlushService.ShardSyncedFlushResponse response = syncedFlushResult.shardResponses().values().iterator().next(); assertTrue(response.success()); } @@ -157,7 +157,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { 
assertNull(listener.result); assertEquals("no such index", listener.error.getMessage()); } - + public void testFailAfterIntermediateCommit() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java index fef6c23231e..fdabaf6b5a8 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; @@ -31,6 +32,9 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; +import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.elasticsearch.test.ESTestCase.randomBoolean; + /** Utils for SyncedFlush */ public class SyncedFlushUtil { @@ -38,25 +42,6 @@ public class SyncedFlushUtil { } - /** - * Blocking single index version of {@link SyncedFlushService#attemptSyncedFlush(String[], IndicesOptions, ActionListener)} - */ - public static IndicesSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, String index) { - SyncedFlushService service = cluster.getInstance(SyncedFlushService.class); - LatchedListener listener = new LatchedListener(); - service.attemptSyncedFlush(new String[]{index}, IndicesOptions.lenientExpandOpen(), listener); - try { - listener.latch.await(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - if (listener.error != null) { - throw ExceptionsHelper.convertToElastic(listener.error); - } - return listener.result; - } - - /** * Blocking version of {@link SyncedFlushService#attemptSyncedFlush(ShardId, ActionListener)} */ @@ -109,5 +94,4 @@ public class SyncedFlushUtil { } return listener.result; } - } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 37a8fd388b8..ba1c16d61ab 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -20,15 +20,12 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.carrotsearch.randomizedtesting.Randomness; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.http.impl.client.HttpClients; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.SuppressForbidden; import org.apache.lucene.util.TestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; @@ -36,7 +33,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import 
org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -44,8 +40,9 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; @@ -63,6 +60,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -137,9 +135,7 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.MalformedURLException; import java.net.URL; -import java.net.UnknownHostException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; @@ -162,6 +158,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BooleanSupplier; +import static org.elasticsearch.client.Requests.syncedFlushRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -1499,13 +1496,13 @@ public abstract class ESIntegTestCase extends ESTestCase { if (randomBoolean()) { client().admin().indices().prepareFlush(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute( new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); - } else if (isInternalCluster()) { - internalCluster().getInstance(SyncedFlushService.class).attemptSyncedFlush(indices, IndicesOptions.lenientExpandOpen(), - new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + } else { + client().admin().indices().syncedFlush(syncedFlushRequest(indices).indicesOptions(IndicesOptions.lenientExpandOpen()), + new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); } } else if (rarely()) { client().admin().indices().prepareForceMerge(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).setMaxNumSegments(between(1, 10)).setFlush(maybeFlush && randomBoolean()).execute( - new LatchedActionListener(newLatch(inFlightAsyncOperations))); + new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); } } 
while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) { From 88a010468731e354f3b95c2b44526227dab64940 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 16 Dec 2015 17:36:14 +0100 Subject: [PATCH 069/322] [TEST] ScriptServiceTests only test single ScriptEngineService --- .../script/ScriptServiceTests.java | 87 +++++++++---------- 1 file changed, 41 insertions(+), 46 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 5abdc4f79bb..e81b0894eca 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -33,6 +33,7 @@ import org.junit.Before; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -48,7 +49,7 @@ import static org.hamcrest.Matchers.sameInstance; public class ScriptServiceTests extends ESTestCase { private ResourceWatcherService resourceWatcherService; - private Set scriptEngineServices; + private ScriptEngineService scriptEngineService; private Map scriptEnginesByLangMap; private ScriptContextRegistry scriptContextRegistry; private ScriptContext[] scriptContexts; @@ -72,8 +73,8 @@ public class ScriptServiceTests extends ESTestCase { .put("path.conf", genericConfigFolder) .build(); resourceWatcherService = new ResourceWatcherService(baseSettings, null); - scriptEngineServices = newHashSet(new TestEngineService()); - scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(scriptEngineServices); + scriptEngineService = new TestEngineService(); + scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(Collections.singleton(scriptEngineService)); //randomly register custom script contexts int randomInt = randomIntBetween(0, 3); //prevent duplicates using map @@ -100,7 +101,7 @@ public class ScriptServiceTests extends ESTestCase { private void buildScriptService(Settings additionalSettings) throws IOException { Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build(); Environment environment = new Environment(finalSettings); - scriptService = new ScriptService(finalSettings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry) { + scriptService = new ScriptService(finalSettings, environment, Collections.singleton(scriptEngineService), resourceWatcherService, scriptContextRegistry) { @Override String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders headersContext) { //mock the script that gets retrieved from an index @@ -230,8 +231,6 @@ public class ScriptServiceTests extends ESTestCase { for (int i = 0; i < numEngineSettings; i++) { String settingKey; do { - ScriptEngineService[] scriptEngineServices = this.scriptEngineServices.toArray(new ScriptEngineService[this.scriptEngineServices.size()]); - ScriptEngineService scriptEngineService = randomFrom(scriptEngineServices); ScriptType scriptType = randomFrom(ScriptType.values()); ScriptContext scriptContext = randomFrom(this.scriptContexts); settingKey = scriptEngineService.types()[0] + "." + scriptType + "." 
+ scriptContext.getKey(); @@ -288,40 +287,38 @@ public class ScriptServiceTests extends ESTestCase { buildScriptService(builder.build()); createFileScripts("groovy", "expression", "mustache", "test"); - for (ScriptEngineService scriptEngineService : scriptEngineServices) { - for (ScriptType scriptType : ScriptType.values()) { - //make sure file scripts have a different name than inline ones. - //Otherwise they are always considered file ones as they can be found in the static cache. - String script = scriptType == ScriptType.FILE ? "file_script" : "script"; - for (ScriptContext scriptContext : this.scriptContexts) { - //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings - ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey()); - if (scriptMode == null) { - scriptMode = scriptContextSettings.get(scriptContext.getKey()); - } - if (scriptMode == null) { - scriptMode = scriptSourceSettings.get(scriptType); - } - if (scriptMode == null) { - scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType); - } + for (ScriptType scriptType : ScriptType.values()) { + //make sure file scripts have a different name than inline ones. + //Otherwise they are always considered file ones as they can be found in the static cache. + String script = scriptType == ScriptType.FILE ? "file_script" : "script"; + for (ScriptContext scriptContext : this.scriptContexts) { + //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings + ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey()); + if (scriptMode == null) { + scriptMode = scriptContextSettings.get(scriptContext.getKey()); + } + if (scriptMode == null) { + scriptMode = scriptSourceSettings.get(scriptType); + } + if (scriptMode == null) { + scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType); + } - for (String lang : scriptEngineService.types()) { - switch (scriptMode) { - case ON: + for (String lang : scriptEngineService.types()) { + switch (scriptMode) { + case ON: + assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); + break; + case OFF: + assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); + break; + case SANDBOX: + if (scriptEngineService.sandboxed()) { assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); - break; - case OFF: + } else { assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); - break; - case SANDBOX: - if (scriptEngineService.sandboxed()) { - assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); - } else { - assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); - } - break; - } + } + break; } } } @@ -338,15 +335,13 @@ public class ScriptServiceTests extends ESTestCase { unknownContext = randomAsciiOfLength(randomIntBetween(1, 30)); } while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext))); - for (ScriptEngineService scriptEngineService : scriptEngineServices) { - for (String type : scriptEngineService.types()) { - try { - scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), contextAndHeaders); - fail("script compilation should have been rejected"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), 
containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); - } + for (String type : scriptEngineService.types()) { + try { + scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( + pluginName, unknownContext), contextAndHeaders); + fail("script compilation should have been rejected"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); } } } From 53b3cd83a573648e153f6548c2b8c085b864186f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 24 Nov 2015 20:39:26 -0500 Subject: [PATCH 070/322] Tribe nodes should apply cluster state updates in batches This commit applies the general mechanism for applying cluster state updates in batches to tribe nodes. Relates #14899, relates #14725 --- .../org/elasticsearch/tribe/TribeService.java | 275 ++++++++++-------- 1 file changed, 157 insertions(+), 118 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 87da13fad4a..62647ad3829 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -20,14 +20,14 @@ package org.elasticsearch.tribe; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -208,142 +209,180 @@ public class TribeService extends AbstractLifecycleComponent { } } - class TribeClusterStateListener implements ClusterStateListener { + class TribeClusterStateListener implements ClusterStateListener { private final String tribeName; + private final TribeNodeClusterStateTaskExecutor executor; TribeClusterStateListener(Node tribeNode) { - this.tribeName = tribeNode.settings().get(TRIBE_NAME); + String tribeName = tribeNode.settings().get(TRIBE_NAME); + this.tribeName = tribeName; + executor = new TribeNodeClusterStateTaskExecutor(tribeName); } @Override public void clusterChanged(final ClusterChangedEvent event) { logger.debug("[{}] received cluster event, [{}]", tribeName, event.source()); - clusterService.submitStateUpdateTask("cluster event from " + tribeName + ", " + event.source(), new ClusterStateUpdateTask() { - @Override - public boolean runOnlyOnMaster() { - return false; + clusterService.submitStateUpdateTask( + "cluster event from " + tribeName + ", " 
+ event.source(), + event, + ClusterStateTaskConfig.build(Priority.NORMAL), + executor, + (source, t) -> logger.warn("failed to process [{}]", t, source)); + } + } + + class TribeNodeClusterStateTaskExecutor implements ClusterStateTaskExecutor { + private final String tribeName; + + TribeNodeClusterStateTaskExecutor(String tribeName) { + this.tribeName = tribeName; + } + + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState accumulator = ClusterState.builder(currentState).build(); + BatchResult.Builder builder = BatchResult.builder(); + + try { + // we only need to apply the latest cluster state update + accumulator = applyUpdate(accumulator, tasks.get(tasks.size() - 1)); + builder.successes(tasks); + } catch (Throwable t) { + builder.failures(tasks, t); + } + + return builder.build(accumulator); + } + + private ClusterState applyUpdate(ClusterState currentState, ClusterChangedEvent task) { + boolean clusterStateChanged = false; + ClusterState tribeState = task.state(); + DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes()); + // -- merge nodes + // go over existing nodes, and see if they need to be removed + for (DiscoveryNode discoNode : currentState.nodes()) { + String markedTribeName = discoNode.attributes().get(TRIBE_NAME); + if (markedTribeName != null && markedTribeName.equals(tribeName)) { + if (tribeState.nodes().get(discoNode.id()) == null) { + clusterStateChanged = true; + logger.info("[{}] removing node [{}]", tribeName, discoNode); + nodes.remove(discoNode.id()); + } } + } + // go over tribe nodes, and see if they need to be added + for (DiscoveryNode tribe : tribeState.nodes()) { + if (currentState.nodes().get(tribe.id()) == null) { + // a new node, add it, but also add the tribe name to the attributes + Map tribeAttr = new HashMap<>(); + for (ObjectObjectCursor attr : tribe.attributes()) { + tribeAttr.put(attr.key, attr.value); + } + tribeAttr.put(TRIBE_NAME, tribeName); + DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), unmodifiableMap(tribeAttr), tribe.version()); + clusterStateChanged = true; + logger.info("[{}] adding node [{}]", tribeName, discoNode); + nodes.put(discoNode); + } + } - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - ClusterState tribeState = event.state(); - DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes()); - // -- merge nodes - // go over existing nodes, and see if they need to be removed - for (DiscoveryNode discoNode : currentState.nodes()) { - String markedTribeName = discoNode.attributes().get(TRIBE_NAME); - if (markedTribeName != null && markedTribeName.equals(tribeName)) { - if (tribeState.nodes().get(discoNode.id()) == null) { - logger.info("[{}] removing node [{}]", tribeName, discoNode); - nodes.remove(discoNode.id()); - } - } + // -- merge metadata + ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + MetaData.Builder metaData = MetaData.builder(currentState.metaData()); + RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + // go over existing indices, and see if they need to be removed + for (IndexMetaData index : currentState.metaData()) { + String markedTribeName = index.getSettings().get(TRIBE_NAME); + if (markedTribeName != null && markedTribeName.equals(tribeName)) 
{ + IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex()); + clusterStateChanged = true; + if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) { + logger.info("[{}] removing index [{}]", tribeName, index.getIndex()); + removeIndex(blocks, metaData, routingTable, index); + } else { + // always make sure to update the metadata and routing table, in case + // there are changes in them (new mapping, shards moving from initializing to started) + routingTable.add(tribeState.routingTable().index(index.getIndex())); + Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); + metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); } - // go over tribe nodes, and see if they need to be added - for (DiscoveryNode tribe : tribeState.nodes()) { - if (currentState.nodes().get(tribe.id()) == null) { - // a new node, add it, but also add the tribe name to the attributes - Map tribeAttr = new HashMap<>(); - for (ObjectObjectCursor attr : tribe.attributes()) { - tribeAttr.put(attr.key, attr.value); - } - tribeAttr.put(TRIBE_NAME, tribeName); - DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), unmodifiableMap(tribeAttr), tribe.version()); - logger.info("[{}] adding node [{}]", tribeName, discoNode); - nodes.put(discoNode); - } + } + } + // go over tribe one, and see if they need to be added + for (IndexMetaData tribeIndex : tribeState.metaData()) { + // if there is no routing table yet, do nothing with it... + IndexRoutingTable table = tribeState.routingTable().index(tribeIndex.getIndex()); + if (table == null) { + continue; + } + final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex()); + if (indexMetaData == null) { + if (!droppedIndices.contains(tribeIndex.getIndex())) { + // a new index, add it, and add the tribe name as a setting + clusterStateChanged = true; + logger.info("[{}] adding index [{}]", tribeName, tribeIndex.getIndex()); + addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); } - - // -- merge metadata - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - MetaData.Builder metaData = MetaData.builder(currentState.metaData()); - RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); - // go over existing indices, and see if they need to be removed - for (IndexMetaData index : currentState.metaData()) { - String markedTribeName = index.getSettings().get(TRIBE_NAME); - if (markedTribeName != null && markedTribeName.equals(tribeName)) { - IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex()); - if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) { - logger.info("[{}] removing index [{}]", tribeName, index.getIndex()); - removeIndex(blocks, metaData, routingTable, index); - } else { - // always make sure to update the metadata and routing table, in case - // there are changes in them (new mapping, shards moving from initializing to started) - routingTable.add(tribeState.routingTable().index(index.getIndex())); - Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); - metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); - } - } - } - // go over tribe one, and see if they need to be added - for (IndexMetaData tribeIndex : tribeState.metaData()) { - // if there is no routing 
table yet, do nothing with it... - IndexRoutingTable table = tribeState.routingTable().index(tribeIndex.getIndex()); - if (table == null) { - continue; - } - final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex()); - if (indexMetaData == null) { - if (!droppedIndices.contains(tribeIndex.getIndex())) { - // a new index, add it, and add the tribe name as a setting - logger.info("[{}] adding index [{}]", tribeName, tribeIndex.getIndex()); + } else { + String existingFromTribe = indexMetaData.getSettings().get(TRIBE_NAME); + if (!tribeName.equals(existingFromTribe)) { + // we have a potential conflict on index names, decide what to do... + if (ON_CONFLICT_ANY.equals(onConflict)) { + // we chose any tribe, carry on + } else if (ON_CONFLICT_DROP.equals(onConflict)) { + // drop the indices, there is a conflict + clusterStateChanged = true; + logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); + removeIndex(blocks, metaData, routingTable, tribeIndex); + droppedIndices.add(tribeIndex.getIndex()); + } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { + // on conflict, prefer a tribe... + String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); + if (tribeName.equals(preferredTribeName)) { + // the new one is hte preferred one, replace... + clusterStateChanged = true; + logger.info("[{}] adding index [{}], preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); + removeIndex(blocks, metaData, routingTable, tribeIndex); addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); - } - } else { - String existingFromTribe = indexMetaData.getSettings().get(TRIBE_NAME); - if (!tribeName.equals(existingFromTribe)) { - // we have a potential conflict on index names, decide what to do... - if (ON_CONFLICT_ANY.equals(onConflict)) { - // we chose any tribe, carry on - } else if (ON_CONFLICT_DROP.equals(onConflict)) { - // drop the indices, there is a conflict - logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); - removeIndex(blocks, metaData, routingTable, tribeIndex); - droppedIndices.add(tribeIndex.getIndex()); - } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { - // on conflict, prefer a tribe... - String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); - if (tribeName.equals(preferredTribeName)) { - // the new one is hte preferred one, replace... 
- logger.info("[{}] adding index [{}], preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); - removeIndex(blocks, metaData, routingTable, tribeIndex); - addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); - } // else: either the existing one is the preferred one, or we haven't seen one, carry on - } - } + } // else: either the existing one is the preferred one, or we haven't seen one, carry on } } - - return ClusterState.builder(currentState).incrementVersion().blocks(blocks).nodes(nodes).metaData(metaData).routingTable(routingTable.build()).build(); } + } - private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) { - metaData.remove(index.getIndex()); - routingTable.remove(index.getIndex()); - blocks.removeIndexBlocks(index.getIndex()); - } + if (!clusterStateChanged) { + return currentState; + } else { + return ClusterState.builder(currentState).incrementVersion().blocks(blocks).nodes(nodes).metaData(metaData).routingTable(routingTable.build()).build(); + } + } - private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) { - Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); - metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); - routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex())); - if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); - } - if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK); - } - if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); - } - } + private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) { + metaData.remove(index.getIndex()); + routingTable.remove(index.getIndex()); + blocks.removeIndexBlocks(index.getIndex()); + } - @Override - public void onFailure(String source, Throwable t) { - logger.warn("failed to process [{}]", t, source); - } - }); + private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) { + Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); + metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); + routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex())); + if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); + } + if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK); + } + if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); + } } } } From d9a24961c55b80d64a10f12ea1ad97d04d9d9e16 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 12:49:41 -0500 Subject: [PATCH 071/322] Fix minor issues in delimited payload token filter docs This commit addresses a few minor issues in 
the delimited payload token filter docs: - the provided example reversed the payloads associated with the tokens "the" and "fox" - two additional typos in the same sentence - "per default" -> "by default" - "default int to" -> "default into" - adds two serial commas --- .../tokenfilters/delimited-payload-tokenfilter.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc index 293b51a0331..b64f5edbeb9 100644 --- a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc @@ -3,9 +3,7 @@ Named `delimited_payload_filter`. Splits tokens into tokens and payload whenever a delimiter character is found. -Example: "the|1 quick|2 fox|3" is split per default int to tokens `fox`, `quick` and `the` with payloads `1`, `2` and `3` respectively. - - +Example: "the|1 quick|2 fox|3" is split by default into tokens `the`, `quick`, and `fox` with payloads `1`, `2`, and `3` respectively. Parameters: From 45b7afe14f4d8f300e808f1987fd74e3e61172df Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 20:06:43 +0100 Subject: [PATCH 072/322] more tests --- .../common/settings/AbstractScopedSettings.java | 2 +- .../org/elasticsearch/common/settings/Setting.java | 2 +- .../cluster/settings/ClusterSettingsIT.java | 10 ++++++++++ .../common/settings/ScopedSettingsTests.java | 2 ++ 4 files changed, 14 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 1efc9d3fed6..13743cabcf6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -43,7 +43,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { if (entry.getScope() != scope) { throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); } - if (entry.isGroupSetting()) { + if (entry.hasComplexMatcher()) { complexMatchers.put(entry.getKey(), entry); } else { keySettings.put(entry.getKey(), entry); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 884479d7cb6..42f77a18c9c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -92,7 +92,7 @@ public class Setting extends ToXContentToBytes { * rather than a single value. The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like cluster.store. * that matches all settings with this prefix. 
*/ - public boolean isGroupSetting() { + boolean isGroupSetting() { return false; } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 5dccbb546ce..fb8a8e28b33 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -235,6 +235,16 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertThat(response3.getPersistentSettings().get(key2), notNullValue()); } + public void testCanUpdateTracerSettings() { + ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*", + "internal:gateway/local*")) + .get(); + assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*", + "internal:gateway/local*"}); + } + public void testUpdateDiscoveryPublishTimeout() { DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index b54784dc16d..a4148594de6 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -151,5 +151,7 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(ref.get().size(), 2); assertTrue(ref.get().contains("internal:index/shard/recovery/*")); assertTrue(ref.get().contains("internal:gateway/local*")); + assertNotNull(settings.get("transport.tracer.include." + randomIntBetween(1, 100))); + assertSame(TransportService.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." + randomIntBetween(1, 100))); } } From 161cabbef41133e47ab5462d94e67e9aa1f91168 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Dec 2015 20:14:00 +0100 Subject: [PATCH 073/322] organize tests --- .../common/settings/ScopedSettingsTests.java | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index a4148594de6..97393c51b8d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -115,11 +115,20 @@ public class ScopedSettingsTests extends ESTestCase { public void testGet() { ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + + // group setting - complex matcher Setting setting = settings.get("cluster.routing.allocation.require.value"); assertEquals(setting, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING); setting = settings.get("cluster.routing.allocation.total_shards_per_node"); assertEquals(setting, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING); + + // array settings - complex matcher + assertNotNull(settings.get("transport.tracer.include." 
+ randomIntBetween(1, 100))); + assertSame(TransportService.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." + randomIntBetween(1, 100))); + + // array settings - complex matcher - only accepts numbers + assertNull(settings.get("transport.tracer.include.FOO")); } public void testIsDynamic(){ @@ -127,6 +136,10 @@ public class ScopedSettingsTests extends ESTestCase { assertFalse(settings.hasDynamicSetting("foo.bar.baz")); assertTrue(settings.hasDynamicSetting("foo.bar")); assertNotNull(settings.get("foo.bar.baz")); + settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + assertTrue(settings.hasDynamicSetting("transport.tracer.include." + randomIntBetween(1, 100))); + assertFalse(settings.hasDynamicSetting("transport.tracer.include.BOOM")); + assertTrue(settings.hasDynamicSetting("cluster.routing.allocation.require.value")); } public void testDiff() throws IOException { @@ -151,7 +164,5 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(ref.get().size(), 2); assertTrue(ref.get().contains("internal:index/shard/recovery/*")); assertTrue(ref.get().contains("internal:gateway/local*")); - assertNotNull(settings.get("transport.tracer.include." + randomIntBetween(1, 100))); - assertSame(TransportService.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." + randomIntBetween(1, 100))); } } From 67e0b5190a155b58b790bafbf56aca721f3c0611 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 16 Dec 2015 11:56:55 -0800 Subject: [PATCH 074/322] Remove old docs gitignores and comment on docs build dir --- .gitignore | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index b7d5d95a256..add7a22e840 100644 --- a/.gitignore +++ b/.gitignore @@ -32,10 +32,10 @@ dependency-reduced-pom.xml # osx stuff .DS_Store -# random old stuff that we should look at the necessity of... -docs/html/ -docs/build.log +# needed in case docs build is run...maybe we can configure doc build to generate files under build? html_docs + +# random old stuff that we should look at the necessity of... /tmp/ backwards/ From ee79d465831e708a5d431aa5ff69aea4756bf799 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 16 Dec 2015 16:38:16 -0500 Subject: [PATCH 075/322] Add gradle thirdPartyAudit to precommit tasks --- build.gradle | 10 ++ .../gradle/precommit/PrecommitTasks.groovy | 3 +- .../precommit/ThirdPartyAuditTask.groovy | 162 ++++++++++++++++++ core/build.gradle | 8 + modules/lang-expression/build.gradle | 4 + modules/lang-groovy/build.gradle | 9 + plugins/discovery-azure/build.gradle | 11 ++ plugins/discovery-ec2/build.gradle | 9 + plugins/discovery-gce/build.gradle | 3 + plugins/lang-plan-a/build.gradle | 3 + plugins/lang-python/build.gradle | 37 ++++ plugins/mapper-attachments/build.gradle | 7 + plugins/repository-hdfs/build.gradle | 5 +- plugins/repository-s3/build.gradle | 9 + qa/evil-tests/build.gradle | 11 ++ test-framework/build.gradle | 2 + 16 files changed, 291 insertions(+), 2 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy diff --git a/build.gradle b/build.gradle index 831db456a19..8a95fa90925 100644 --- a/build.gradle +++ b/build.gradle @@ -123,6 +123,16 @@ subprojects { } } } + // For reasons we don't fully understand yet, external dependencies are not picked up by Ant's optional tasks. + // But you can easily do it in another way. 
+ // Only if your buildscript and Ant's optional task need the same library would you have to define it twice. + // https://docs.gradle.org/current/userguide/organizing_build_logic.html + configurations { + forbiddenApis + } + dependencies { + forbiddenApis 'de.thetaphi:forbiddenapis:2.0' + } } // Ensure similar tasks in dependent projects run first. The projectsEvaluated here is diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 04878d979e9..ef2a49cc444 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -34,7 +34,8 @@ class PrecommitTasks { List precommitTasks = [ configureForbiddenApis(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), - project.tasks.create('jarHell', JarHellTask.class)] + project.tasks.create('jarHell', JarHellTask.class), + project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)] // tasks with just tests don't need dependency licenses, so this flag makes adding // the task optional diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy new file mode 100644 index 00000000000..b5ce35816a1 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit + +import org.gradle.api.DefaultTask +import org.gradle.api.artifacts.UnknownConfigurationException +import org.gradle.api.file.FileCollection +import org.gradle.api.tasks.TaskAction + +import org.apache.tools.ant.BuildLogger +import org.apache.tools.ant.Project + +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Basic static checking to keep tabs on third party JARs + */ +public class ThirdPartyAuditTask extends DefaultTask { + + // true to be lenient about MISSING CLASSES + private boolean lenient; + + // patterns for classes to exclude, because we understand their issues + private String[] excludes = new String[0]; + + ThirdPartyAuditTask() { + dependsOn(project.configurations.testCompile) + description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'" + } + + /** + * Set to true to be lenient with dependencies. By default this check will fail if it finds + * MISSING CLASSES. This means the set of jars is incomplete. However, in some cases + * this can be due to intentional exclusions that are well-tested and understood. 
+ */ + public void setLenient(boolean value) { + lenient = value; + } + + /** + * Returns true if leniency about missing classes is enabled. + */ + public boolean isLenient() { + return lenient; + } + + /** + * classes that should be excluded from the scan, + * e.g. because we know what sheisty stuff those particular classes are up to. + */ + public void setExcludes(String[] classes) { + for (String s : classes) { + if (s.indexOf('*') != -1) { + throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!") + } + } + excludes = classes; + } + + /** + * Returns current list of exclusions. + */ + public String[] getExcludes() { + return excludes; + } + + @TaskAction + public void check() { + AntBuilder ant = new AntBuilder() + + // we are noisy for many reasons, working around performance problems with forbidden-apis, dealing + // with warnings about missing classes, etc. so we use our own "quiet" AntBuilder + ant.project.buildListeners.each { listener -> + if (listener instanceof BuildLogger) { + listener.messageOutputLevel = Project.MSG_ERR; + } + }; + + // we only want third party dependencies. + FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> + dependency.group != "org.elasticsearch" + }) + + // we don't want provided dependencies, which we have already scanned. e.g. don't + // scan ES core's dependencies for every single plugin + try { + jars -= project.configurations.getByName("provided") + } catch (UnknownConfigurationException ignored) {} + + // no dependencies matched, we are done + if (jars.isEmpty()) { + return; + } + + ant.taskdef(name: "thirdPartyAudit", + classname: "de.thetaphi.forbiddenapis.ant.AntTask", + classpath: project.configurations.forbiddenApis.asPath) + + // print which jars we are going to scan, always + // this is not the time to try to be succinct! Forbidden will print plenty on its own! + Set names = new HashSet<>() + for (File jar : jars) { + names.add(jar.getName()) + } + Logger logger = LoggerFactory.getLogger(getClass()); + logger.error("[thirdPartyAudit] Scanning: " + names) + + // warn that you won't see any forbidden apis warnings + if (lenient) { + logger.warn("[thirdPartyAudit] WARNING: leniency is enabled, will not fail if classes are missing!") + } + + // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, + // and then remove our temp dir afterwards. don't complain: try it yourself. + // we don't use gradle temp dir handling, just google it, or try it yourself. + + File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit') + + // clean up any previous mess (if we failed), then unzip everything to one directory + ant.delete(dir: tmpDir.getAbsolutePath()) + tmpDir.mkdirs() + for (File jar : jars) { + ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()) + } + + // convert exclusion class names to binary file names + String[] excludedFiles = new String[excludes.length]; + for (int i = 0; i < excludes.length; i++) { + excludedFiles[i] = excludes[i].replace('.', '/') + ".class" + // check if the excluded file exists, if not, sure sign things are outdated + if (! 
new File(tmpDir, excludedFiles[i]).exists()) { + throw new IllegalStateException("bogus thirdPartyAudit exclusion: '" + excludes[i] + "', not found in any dependency") + } + } + + ant.thirdPartyAudit(internalRuntimeForbidden: true, + failOnUnsupportedJava: false, + failOnMissingClasses: !lenient, + classpath: project.configurations.testCompile.asPath) { + fileset(dir: tmpDir, excludes: excludedFiles.join(',')) + } + // clean up our mess (if we succeed) + ant.delete(dir: tmpDir.getAbsolutePath()) + } +} diff --git a/core/build.gradle b/core/build.gradle index fd8a0c10f5a..e00164af972 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -111,6 +111,14 @@ forbiddenPatterns { exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt' } +// classes are missing, e.g. org.jboss.marshalling.Marshaller +thirdPartyAudit.lenient = true +// uses internal sun ssl classes! +thirdPartyAudit.excludes = [ + // sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', +] + // dependency license are currently checked in distribution dependencyLicenses.enabled = false diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 9f62e34687d..aac94570a36 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -33,6 +33,10 @@ dependencyLicenses { mapping from: /lucene-.*/, to: 'lucene' } +// do we or do we not depend on asm-tree, that is the question +// classes are missing, e.g. org.objectweb.asm.tree.LabelNode +thirdPartyAudit.lenient = true + compileJava.options.compilerArgs << '-Xlint:-rawtypes' compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 341dcbf0d6c..2bacd2dd560 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -35,3 +35,12 @@ integTest { systemProperty 'es.script.indexed', 'on' } } + +// classes are missing, e.g. jline.console.completer.Completer +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // sun.misc.Unsafe + 'groovy.json.internal.FastStringUtils', + 'groovy.json.internal.FastStringUtils$StringImplementation$1', + 'groovy.json.internal.FastStringUtils$StringImplementation$2', +] diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 5042824eb07..e823351dc36 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -66,3 +66,14 @@ compileJava.options.compilerArgs << '-Xlint:-deprecation' // TODO: and why does this static not show up in maven... compileTestJava.options.compilerArgs << '-Xlint:-static' +// classes are missing, e.g. org.osgi.framework.BundleActivator +thirdPartyAudit.lenient = true +// WE ARE JAR HELLING WITH THE JDK AND THAT IS WHY THIS HAPPENS +// TODO: fix this!!!!!!!!!!! 
+thirdPartyAudit.excludes = [ + // com.sun.xml.fastinfoset.stax.StAXDocumentParser + 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector', + 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector$CharSequenceImpl', + // com.sun.xml.fastinfoset.stax.StAXDocumentSerializer + 'com.sun.xml.bind.v2.runtime.output.FastInfosetStreamWriterOutput', +] diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 77cfd6626d5..04eafd07866 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -48,3 +48,12 @@ test { // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name } + +// classes are missing, e.g. org.apache.avalon.framework.logger.Logger +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', +] diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 4e6ade8788f..2ec479a3e80 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -31,3 +31,6 @@ test { // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name } + +// classes are missing, e.g. org.apache.log.Logger +thirdPartyAudit.lenient = true diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle index 618c094f683..c23c3a30efc 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-plan-a/build.gradle @@ -33,6 +33,9 @@ dependencies { compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes' compileTestJava.options.compilerArgs << '-Xlint:-unchecked' +// classes are missing, e.g. org.objectweb.asm.tree.LabelNode +thirdPartyAudit.lenient = true + // regeneration logic, comes in via ant right now // don't port it to gradle, it works fine. diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index 269a3249386..0c5edc13522 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -36,3 +36,40 @@ integTest { } } +// classes are missing, e.g. 
org.tukaani.xz.FilterOptions +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + + // sun.misc.Cleaner + 'org.python.netty.util.internal.Cleaner0', + + // sun.misc.Signal + 'jnr.posix.JavaPOSIX', + 'jnr.posix.JavaPOSIX$SunMiscSignalHandler', + + // sun.misc.Unsafe + 'com.kenai.jffi.MemoryIO$UnsafeImpl', + 'com.kenai.jffi.MemoryIO$UnsafeImpl32', + 'com.kenai.jffi.MemoryIO$UnsafeImpl64', + 'org.python.google.common.cache.Striped64', + 'org.python.google.common.cache.Striped64$1', + 'org.python.google.common.cache.Striped64$Cell', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$2', + 'org.python.netty.util.internal.PlatformDependent0', + 'org.python.netty.util.internal.UnsafeAtomicIntegerFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicLongFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicReferenceFieldUpdater', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$1', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$TreeBin', + 'org.python.netty.util.internal.chmv8.CountedCompleter', + 'org.python.netty.util.internal.chmv8.CountedCompleter$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$WorkQueue', + 'org.python.netty.util.internal.chmv8.ForkJoinTask', + 'org.python.netty.util.internal.chmv8.ForkJoinTask$1', +] diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index e14cf543043..867b316fbf5 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -69,3 +69,10 @@ forbiddenPatterns { exclude '**/*.pdf' exclude '**/*.epub' } + +// classes are missing, e.g. org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // com.sun.syndication (SyndFeedInput, SyndFeed, SyndEntry, SyndContent) + 'org.apache.tika.parser.feed.FeedParser', +] diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 8f18f67f70d..c09a3ff4d67 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -200,4 +200,7 @@ integTest { cluster { plugin(pluginProperties.extension.name, zipTree(distZipHadoop2.archivePath)) } -} \ No newline at end of file +} + +// classes are missing, e.g. org.mockito.Mockito +thirdPartyAudit.lenient = true diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 32ad37530c2..bd38e92f4c8 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -49,3 +49,12 @@ test { // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name } + +// classes are missing, e.g. 
org.apache.log.Logger +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', +] diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 96aa6fb635d..bb5a521e53c 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -34,3 +34,14 @@ dependencies { test { systemProperty 'tests.security.manager', 'false' } + +// classes are missing, com.ibm.icu.lang.UCharacter +thirdPartyAudit.lenient = true +thirdPartyAudit.excludes = [ + // sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', +] diff --git a/test-framework/build.gradle b/test-framework/build.gradle index a423f56c922..2263413bbde 100644 --- a/test-framework/build.gradle +++ b/test-framework/build.gradle @@ -47,3 +47,5 @@ forbiddenApisMain { // TODO: should we have licenses for our test deps? dependencyLicenses.enabled = false +// we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!! +thirdPartyAudit.lenient = true From 49f37a526c1adcea498a3274026c9a929e9fc08a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 16 Dec 2015 16:50:52 -0500 Subject: [PATCH 076/322] fix bad indent --- .../elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index b5ce35816a1..010b19675d1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -91,7 +91,7 @@ public class ThirdPartyAuditTask extends DefaultTask { if (listener instanceof BuildLogger) { listener.messageOutputLevel = Project.MSG_ERR; } - }; + }; // we only want third party dependencies. FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> From 9aa41d092ed5b7ebe4bfd9bfe21450b8f92ee010 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 13:57:08 -0500 Subject: [PATCH 077/322] Info on compressed ordinary object pointers This commit adds to JvmInfo the status of whether or not compressed ordinary object pointers are enabled. Additionally, logging of the max heap size and the status of the compressed ordinary object pointers flag are provided on startup. 
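For illustration, a minimal sketch of reading the new flag through the API this patch adds; the class and main() scaffolding are assumptions, while JvmInfo.jvmInfo() and usingCompressedOops() come from the change itself:

    import org.elasticsearch.monitor.jvm.JvmInfo;

    public class CompressedOopsCheck {
        public static void main(String[] args) {
            // usingCompressedOops() returns a Boolean, not a boolean: null
            // signals the state could not be deduced from the HotSpot bean
            Boolean oops = JvmInfo.jvmInfo().usingCompressedOops();
            System.out.println(oops == null ? "unknown" : Boolean.toString(oops));
        }
    }

The same tri-state rendering appears in the new startup log line and in the node info output under using_compressed_ordinary_object_pointers.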
Relates #13187, relates elastic/elasticsearch-definitive-guide#455 --- .../elasticsearch/env/NodeEnvironment.java | 38 +++++++++++++--- .../elasticsearch/monitor/jvm/JvmInfo.java | 44 ++++++++++++++++++- 2 files changed, 76 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 3a1b430f98b..86b6b704a72 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -21,7 +21,12 @@ package org.elasticsearch.env; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -31,6 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -38,11 +44,25 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsProbe; +import org.elasticsearch.monitor.jvm.JvmInfo; import java.io.Closeable; import java.io.IOException; -import java.nio.file.*; -import java.util.*; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -145,7 +165,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) { Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId)); Files.createDirectories(dir); - + try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) { logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { @@ -187,6 +207,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } maybeLogPathDetails(); + maybeLogHeapDetails(); if (settings.getAsBoolean(SETTING_ENABLE_LUCENE_SEGMENT_INFOS_TRACE, false)) { SegmentInfos.setInfoStream(System.out); @@ -274,6 +295,13 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } } + private void maybeLogHeapDetails() { + ByteSizeValue maxHeapSize = JvmInfo.jvmInfo().getMem().getHeapMax(); + Boolean usingCompressedOops = 
JvmInfo.jvmInfo().usingCompressedOops(); + String usingCompressedOopsStatus = usingCompressedOops == null ? "unknown" : Boolean.toString(usingCompressedOops); + logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, usingCompressedOopsStatus); + } + private static String toString(Collection items) { StringBuilder b = new StringBuilder(); for(String item : items) { @@ -811,7 +839,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { // Sanity check: assert Integer.parseInt(shardPath.getName(count-1).toString()) >= 0; assert "indices".equals(shardPath.getName(count-3).toString()); - + return shardPath.getParent().getParent().getParent(); } } diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index e224c722d42..7fbfb5bc82b 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -29,7 +29,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; -import java.lang.management.*; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.lang.management.ManagementPermission; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.PlatformManagedObject; +import java.lang.management.RuntimeMXBean; +import java.lang.reflect.Method; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -101,6 +108,21 @@ public class JvmInfo implements Streamable, ToXContent { info.memoryPools[i] = memoryPoolMXBean.getName(); } + try { + @SuppressWarnings("unchecked") Class clazz = + (Class)Class.forName("com.sun.management.HotSpotDiagnosticMXBean"); + Class vmOptionClazz = Class.forName("com.sun.management.VMOption"); + PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz); + Method vmOptionMethod = clazz.getMethod("getVMOption", String.class); + Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops"); + Method valueMethod = vmOptionClazz.getMethod("getValue"); + String value = (String)valueMethod.invoke(useCompressedOopsVmOption); + info.usingCompressedOops = Boolean.parseBoolean(value); + } catch (Throwable t) { + // unable to deduce the state of compressed oops + // usingCompressedOops will hold its default value of null + } + INSTANCE = info; } @@ -135,6 +157,8 @@ public class JvmInfo implements Streamable, ToXContent { String[] gcCollectors = Strings.EMPTY_ARRAY; String[] memoryPools = Strings.EMPTY_ARRAY; + private Boolean usingCompressedOops; + private JvmInfo() { } @@ -258,6 +282,10 @@ public class JvmInfo implements Streamable, ToXContent { return this.systemProperties; } + public Boolean usingCompressedOops() { + return this.usingCompressedOops; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.JVM); @@ -279,6 +307,8 @@ public class JvmInfo implements Streamable, ToXContent { builder.field(Fields.GC_COLLECTORS, gcCollectors); builder.field(Fields.MEMORY_POOLS, memoryPools); + builder.field(Fields.USING_COMPRESSED_OOPS, usingCompressedOops == null ? 
"unknown" : usingCompressedOops); + builder.endObject(); return builder; } @@ -306,6 +336,7 @@ public class JvmInfo implements Streamable, ToXContent { static final XContentBuilderString DIRECT_MAX_IN_BYTES = new XContentBuilderString("direct_max_in_bytes"); static final XContentBuilderString GC_COLLECTORS = new XContentBuilderString("gc_collectors"); static final XContentBuilderString MEMORY_POOLS = new XContentBuilderString("memory_pools"); + static final XContentBuilderString USING_COMPRESSED_OOPS = new XContentBuilderString("using_compressed_ordinary_object_pointers"); } public static JvmInfo readJvmInfo(StreamInput in) throws IOException { @@ -337,6 +368,11 @@ public class JvmInfo implements Streamable, ToXContent { mem.readFrom(in); gcCollectors = in.readStringArray(); memoryPools = in.readStringArray(); + if (in.readBoolean()) { + usingCompressedOops = in.readBoolean(); + } else { + usingCompressedOops = null; + } } @Override @@ -361,6 +397,12 @@ public class JvmInfo implements Streamable, ToXContent { mem.writeTo(out); out.writeStringArray(gcCollectors); out.writeStringArray(memoryPools); + if (usingCompressedOops != null) { + out.writeBoolean(true); + out.writeBoolean(usingCompressedOops); + } else { + out.writeBoolean(false); + } } public static class Mem implements Streamable { From 12e241ff1ac6c9b9418c69b821c644703481cf77 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 17:29:08 -0500 Subject: [PATCH 078/322] Ensure compressed oops flag is written as a string --- core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index 7fbfb5bc82b..8e1fb9ba7db 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -307,7 +307,7 @@ public class JvmInfo implements Streamable, ToXContent { builder.field(Fields.GC_COLLECTORS, gcCollectors); builder.field(Fields.MEMORY_POOLS, memoryPools); - builder.field(Fields.USING_COMPRESSED_OOPS, usingCompressedOops == null ? "unknown" : usingCompressedOops); + builder.field(Fields.USING_COMPRESSED_OOPS, usingCompressedOops == null ? 
"unknown" : Boolean.toString(usingCompressedOops)); builder.endObject(); return builder; From 207f0a4eda73fd706a7e8e8dc938b8bedec0740e Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Wed, 16 Dec 2015 17:53:55 -0500 Subject: [PATCH 079/322] use async translog for this test --- .../java/org/elasticsearch/indices/stats/IndexStatsIT.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index a87da6fc046..4bdd972ea9c 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -23,8 +23,8 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -46,11 +46,12 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.EnumSet; @@ -315,7 +316,7 @@ public class IndexStatsIT extends ESIntegTestCase { .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") .put("index.merge.policy.type", "tiered") - + .put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, "ASYNC") )); ensureGreen(); long termUpto = 0; From 7e14245c698b6206824fe320c8b64c384493d852 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 18:43:31 -0500 Subject: [PATCH 080/322] Add callback for publication of new cluster state This commit adds a callback for a cluster state task executor that will be invoked if the execution of a batch of cluster state update tasks led to a new cluster state and that new cluster state was successfully published. 
Closes #15482 --- .../cluster/ClusterStateTaskExecutor.java | 7 +++++ .../service/InternalClusterService.java | 2 ++ .../cluster/ClusterServiceIT.java | 30 +++++++++++++++++-- 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index ab85d9540f0..fb22c2ca368 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -37,6 +37,13 @@ public interface ClusterStateTaskExecutor { return true; } + /** + * Callback invoked after new cluster state is published. Note that + * this method is not invoked if the cluster state was not updated. + */ + default void clusterStatePublished(ClusterState newClusterState) { + } + /** * Represents the result of a batched execution of cluster state update tasks * @param the type of the cluster state update task diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index b8c898a31e9..4888fc9d48e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -605,6 +605,8 @@ public class InternalClusterService extends AbstractLifecycleComponent { private AtomicInteger counter = new AtomicInteger(); + private AtomicInteger batches = new AtomicInteger(); + private AtomicInteger published = new AtomicInteger(); @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { tasks.forEach(task -> task.execute()); counter.addAndGet(tasks.size()); - return BatchResult.builder().successes(tasks).build(currentState); + ClusterState maybeUpdatedClusterState = currentState; + if (randomBoolean()) { + maybeUpdatedClusterState = ClusterState.builder(currentState).build(); + batches.incrementAndGet(); + } + return BatchResult.builder().successes(tasks).build(maybeUpdatedClusterState); } @Override public boolean runOnlyOnMaster() { return false; } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + published.incrementAndGet(); + } } int numberOfThreads = randomIntBetween(2, 8); int tasksSubmittedPerThread = randomIntBetween(1, 1024); @@ -838,6 +861,7 @@ public class ClusterServiceIT extends ESIntegTestCase { for (TaskExecutor executor : executors) { if (counts.containsKey(executor)) { assertEquals((int) counts.get(executor), executor.counter.get()); + assertEquals(executor.batches.get(), executor.published.get()); } } From 42138007db45fee85558a19ca8702e944ec706fd Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 16 Dec 2015 18:56:02 -0500 Subject: [PATCH 081/322] add some more comments about internal api usage --- .../gradle/precommit/ThirdPartyAuditTask.groovy | 4 ---- core/build.gradle | 2 +- modules/lang-groovy/build.gradle | 2 +- plugins/discovery-azure/build.gradle | 4 ++-- plugins/discovery-ec2/build.gradle | 6 +++--- plugins/lang-python/build.gradle | 8 ++++---- plugins/mapper-attachments/build.gradle | 2 +- plugins/repository-s3/build.gradle | 6 +++--- qa/evil-tests/build.gradle | 2 +- 9 files changed, 16 insertions(+), 20 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 010b19675d1..86ac767d06c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -26,9 +26,6 @@ import org.gradle.api.tasks.TaskAction import org.apache.tools.ant.BuildLogger import org.apache.tools.ant.Project -import org.slf4j.Logger -import org.slf4j.LoggerFactory - /** * Basic static checking to keep tabs on third party JARs */ @@ -119,7 +116,6 @@ public class ThirdPartyAuditTask extends DefaultTask { for (File jar : jars) { names.add(jar.getName()) } - Logger logger = LoggerFactory.getLogger(getClass()); logger.error("[thirdPartyAudit] Scanning: " + names) // warn that you won't see any forbidden apis warnings diff --git a/core/build.gradle b/core/build.gradle index e00164af972..302757ccc8b 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -115,7 +115,7 @@ forbiddenPatterns { thirdPartyAudit.lenient = true // uses internal sun ssl classes! thirdPartyAudit.excludes = [ - // sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', ] diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 2bacd2dd560..76686a760a2 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -39,7 +39,7 @@ integTest { // classes are missing, e.g. jline.console.completer.Completer thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // sun.misc.Unsafe + // uses internal java api: sun.misc.Unsafe 'groovy.json.internal.FastStringUtils', 'groovy.json.internal.FastStringUtils$StringImplementation$1', 'groovy.json.internal.FastStringUtils$StringImplementation$2', diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index e823351dc36..6f21364ea6d 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -71,9 +71,9 @@ thirdPartyAudit.lenient = true // WE ARE JAR HELLING WITH THE JDK AND THAT IS WHY THIS HAPPENS // TODO: fix this!!!!!!!!!!! thirdPartyAudit.excludes = [ - // com.sun.xml.fastinfoset.stax.StAXDocumentParser + // uses internal java api: com.sun.xml.fastinfoset.stax.StAXDocumentParser 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector', 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector$CharSequenceImpl', - // com.sun.xml.fastinfoset.stax.StAXDocumentSerializer + // uses internal java api: com.sun.xml.fastinfoset.stax.StAXDocumentSerializer 'com.sun.xml.bind.v2.runtime.output.FastInfosetStreamWriterOutput', ] diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 04eafd07866..14767c6783b 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -52,8 +52,8 @@ test { // classes are missing, e.g. 
org.apache.avalon.framework.logger.Logger thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // com.sun.org.apache.xpath.internal.XPathContext + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext 'com.amazonaws.util.XpathUtils', ] diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index 0c5edc13522..a7faedf6a78 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -39,17 +39,17 @@ integTest { // classes are missing, e.g. org.tukaani.xz.FilterOptions thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) 'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', - // sun.misc.Cleaner + // uses internal java api: sun.misc.Cleaner 'org.python.netty.util.internal.Cleaner0', - // sun.misc.Signal + // uses internal java api: sun.misc.Signal 'jnr.posix.JavaPOSIX', 'jnr.posix.JavaPOSIX$SunMiscSignalHandler', - // sun.misc.Unsafe + // uses internal java api: sun.misc.Unsafe 'com.kenai.jffi.MemoryIO$UnsafeImpl', 'com.kenai.jffi.MemoryIO$UnsafeImpl32', 'com.kenai.jffi.MemoryIO$UnsafeImpl64', diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index 867b316fbf5..f3c414a0718 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -73,6 +73,6 @@ forbiddenPatterns { // classes are missing, e.g. org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // com.sun.syndication (SyndFeedInput, SyndFeed, SyndEntry, SyndContent) + // uses internal java api: com.sun.syndication (SyndFeedInput, SyndFeed, SyndEntry, SyndContent) 'org.apache.tika.parser.feed.FeedParser', ] diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index bd38e92f4c8..82797788f8e 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -53,8 +53,8 @@ test { // classes are missing, e.g. 
org.apache.log.Logger thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // com.sun.org.apache.xpath.internal.XPathContext + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext 'com.amazonaws.util.XpathUtils', ] diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index bb5a521e53c..02ed75fccbb 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -38,7 +38,7 @@ test { // classes are missing, com.ibm.icu.lang.UCharacter thirdPartyAudit.lenient = true thirdPartyAudit.excludes = [ - // sun.misc.Unsafe + // uses internal java api: sun.misc.Unsafe 'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$Cell', From 9a1133ca504cbf93114360230d40ccaa486a5dee Mon Sep 17 00:00:00 2001 From: Jason Bryan Date: Wed, 16 Dec 2015 17:43:50 -0500 Subject: [PATCH 082/322] Fix typo in scroll.asciidoc Fix scroll request with sort. Closes #15493 --- docs/reference/search/request/scroll.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc index 825564d799d..e18593d21cc 100644 --- a/docs/reference/search/request/scroll.asciidoc +++ b/docs/reference/search/request/scroll.asciidoc @@ -98,7 +98,7 @@ curl -XGET 'localhost:9200/_search?scroll=1m' -d ' { "sort": [ "_doc" - } + ] } ' -------------------------------------------------- From 17a806631d0b4afca80c457d04c0d7282cc94bef Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 16 Dec 2015 17:59:43 -0800 Subject: [PATCH 083/322] Test: Allow integ tests to exclude mock plugins We currently randomly add a set of mock plugins to integ tests. Sometimes it is necessary to omit this mock plugins, but other times you may just want to suppress a particular mock plugin. For example, if you have your own transport, you want to omit the asserting local transport mock, since they would both try to set the transport.type. 
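For example, a test that supplies its own transport could override the new hook inside its ESIntegTestCase subclass roughly like this (the filtering is illustrative; getMockPlugins() and the plugin class names come from this change):

    @Override
    protected Collection<Class<? extends Plugin>> getMockPlugins() {
        // keep the default mocks but drop the one that would also set transport.type
        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.getMockPlugins());
        plugins.remove(AssertingLocalTransport.TestPlugin.class);
        return plugins;
    }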
--- .../java/org/elasticsearch/tribe/TribeIT.java | 9 +++++++- .../test/ESBackcompatTestCase.java | 8 +++++++ .../elasticsearch/test/ESIntegTestCase.java | 22 +++++++++++++++---- .../test/InternalTestCluster.java | 20 ++++++----------- .../test/NodeConfigurationSource.java | 20 +++++++++++++++++ .../ClusterDiscoveryConfiguration.java | 3 +++ .../test/test/InternalTestClusterTests.java | 8 +++---- 7 files changed, 68 insertions(+), 22 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 28a3dea118e..1350dcbb8ed 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; @@ -47,6 +48,7 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -79,6 +81,11 @@ public class TribeIT extends ESIntegTestCase { return Settings.builder().put(Node.HTTP_ENABLED, false).build(); } + @Override + public Collection> nodePlugins() { + return Collections.emptyList(); + } + @Override public Settings transportClientSettings() { return null; @@ -86,7 +93,7 @@ public class TribeIT extends ESIntegTestCase { }; cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2, - Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, true); + Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList()); cluster2.beforeTest(getRandom(), 0.1); cluster2.ensureAtLeastNumDataNodes(2); diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index ade424599fe..49644196da4 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.junit.listeners.LoggingListener; @@ -40,6 +41,8 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; import java.util.Map; import static org.hamcrest.Matchers.is; @@ -175,6 +178,11 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { return externalNodeSettings(nodeOrdinal); } + @Override + public Collection> nodePlugins() { + return Collections.emptyList(); + } + @Override public Settings transportClientSettings() { return transportClientSettings(); diff --git 
a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index ba1c16d61ab..e4fb5e755ca 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -95,6 +95,7 @@ import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -111,13 +112,18 @@ import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.Node; +import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.client.RandomizingClient; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; +import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.test.transport.MockTransportService; import org.hamcrest.Matchers; import org.joda.time.DateTimeZone; import org.junit.After; @@ -128,6 +134,7 @@ import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; @@ -1806,14 +1813,21 @@ public abstract class ESIntegTestCase extends ESTestCase { nodeMode = "local"; } - boolean enableMockModules = enableMockModules(); + Collection> mockPlugins = getMockPlugins(); + return new InternalTestCluster(nodeMode, seed, createTempDir(), minNumDataNodes, maxNumDataNodes, InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), - InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, enableMockModules); + InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins); } - protected boolean enableMockModules() { - return RandomizedTest.systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true); + /** Return the mock plugins the cluster should use. These may be randomly omitted based on the cluster seed. 
*/ + protected Collection> getMockPlugins() { + return pluginList(MockTransportService.TestPlugin.class, + MockFSIndexStore.TestPlugin.class, + NodeMocksPlugin.class, + MockEngineFactoryPlugin.class, + MockSearchService.TestPlugin.class, + AssertingLocalTransport.TestPlugin.class); } /** diff --git a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 7ae3226b66a..bc6db112d76 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -199,7 +199,7 @@ public final class InternalTestCluster extends TestCluster { private final ExecutorService executor; - private final boolean enableMockModules; + private final Collection> mockPlugins; /** * All nodes started by the cluster will have their name set to nodePrefix followed by a positive number @@ -212,7 +212,7 @@ public final class InternalTestCluster extends TestCluster { public InternalTestCluster(String nodeMode, long clusterSeed, Path baseDir, int minNumDataNodes, int maxNumDataNodes, String clusterName, NodeConfigurationSource nodeConfigurationSource, int numClientNodes, - boolean enableHttpPipelining, String nodePrefix, boolean enableMockModules) { + boolean enableHttpPipelining, String nodePrefix, Collection> mockPlugins) { super(clusterSeed); if ("network".equals(nodeMode) == false && "local".equals(nodeMode) == false) { throw new IllegalArgumentException("Unknown nodeMode: " + nodeMode); @@ -248,7 +248,7 @@ public final class InternalTestCluster extends TestCluster { this.nodePrefix = nodePrefix; assert nodePrefix != null; - this.enableMockModules = enableMockModules; + this.mockPlugins = mockPlugins; /* * TODO @@ -359,16 +359,10 @@ public final class InternalTestCluster extends TestCluster { private Collection> getPlugins(long seed) { Set> plugins = new HashSet<>(nodeConfigurationSource.nodePlugins()); - Random random = new Random(seed); - if (enableMockModules && usually(random)) { - plugins.add(MockTransportService.TestPlugin.class); - plugins.add(MockFSIndexStore.TestPlugin.class); - plugins.add(NodeMocksPlugin.class); - plugins.add(MockEngineFactoryPlugin.class); - plugins.add(MockSearchService.TestPlugin.class); - if (isLocalTransportConfigured()) { - plugins.add(AssertingLocalTransport.TestPlugin.class); - } + plugins.addAll(mockPlugins); + if (isLocalTransportConfigured() == false) { + // this is crazy we must do this here...we should really just always be using local transport... 
+ plugins.remove(AssertingLocalTransport.TestPlugin.class); } return plugins; } diff --git a/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java b/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java index e04e840e525..5dfb845c192 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java +++ b/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java @@ -19,10 +19,18 @@ package org.elasticsearch.test; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MockEngineFactoryPlugin; +import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.MockSearchService; +import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.test.transport.MockTransportService; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.List; public abstract class NodeConfigurationSource { @@ -43,6 +51,18 @@ public abstract class NodeConfigurationSource { */ public abstract Settings nodeSettings(int nodeOrdinal); + /** Plugins that will be randomly added to the node */ + public Collection> mockPlugins() { + List> plugins = new ArrayList<>(); + plugins.add(MockTransportService.TestPlugin.class); + plugins.add(MockFSIndexStore.TestPlugin.class); + plugins.add(NodeMocksPlugin.class); + plugins.add(MockEngineFactoryPlugin.class); + plugins.add(MockSearchService.TestPlugin.class); + plugins.add(AssertingLocalTransport.TestPlugin.class); + return plugins; + } + /** Returns plugins that should be loaded on the node */ public Collection> nodePlugins() { return Collections.emptyList(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index e549c185616..484f65ea650 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -24,12 +24,15 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.Set; diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 1514e254a7f..af468fa084c 100644 --- a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -52,8 +52,8 @@ public class InternalTestClusterTests extends ESTestCase { String nodePrefix = randomRealisticUnicodeOfCodepointLengthBetween(1, 10); Path baseDir = createTempDir(); - InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, 
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
index 1514e254a7f..af468fa084c 100644
--- a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
+++ b/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
@@ -52,8 +52,8 @@ public class InternalTestClusterTests extends ESTestCase {
         String nodePrefix = randomRealisticUnicodeOfCodepointLengthBetween(1, 10);

         Path baseDir = createTempDir();
-        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
-        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
+        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
+        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
         // TODO: this is not ideal - we should have a way to make sure ports are initialized in the same way
         assertClusters(cluster0, cluster1, false);
@@ -110,8 +110,8 @@ public class InternalTestClusterTests extends ESTestCase {
         String nodePrefix = "foobar";

         Path baseDir = createTempDir();
-        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
-        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
+        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
+        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());

         assertClusters(cluster0, cluster1, false);
         long seed = randomLong();

From 4523eaec889960ebaf183f3c4a943d47fc59d069 Mon Sep 17 00:00:00 2001
From: Jack Conradson
Date: Wed, 16 Dec 2015 12:36:13 -0800
Subject: [PATCH 084/322] Added plumbing for compile time script parameters.

Closes #15464 --- .../TransportRenderSearchTemplateAction.java | 4 +- .../action/update/UpdateHelper.java | 3 +- .../index/query/QueryShardContext.java | 3 +- .../index/query/ScriptQueryBuilder.java | 5 +- .../script/ScriptScoreFunctionBuilder.java | 5 +- .../script/NativeScriptEngineService.java | 2 +- .../script/ScriptEngineService.java | 2 +- .../elasticsearch/script/ScriptService.java | 78 +++++++++++++------ .../elasticsearch/search/SearchService.java | 5 +- .../heuristics/ScriptHeuristic.java | 5 +- .../scripted/InternalScriptedMetric.java | 3 +- .../scripted/ScriptedMetricAggregator.java | 7 +- .../BucketScriptPipelineAggregator.java | 2 +- .../BucketSelectorPipelineAggregator.java | 3 +- .../support/ValuesSourceParser.java | 3 +- .../script/ScriptFieldsParseElement.java | 5 +- .../search/sort/ScriptSortParser.java | 3 +- .../suggest/phrase/PhraseSuggestParser.java | 3 +- .../elasticsearch/script/FileScriptTests.java | 4 +- .../script/NativeScriptTests.java | 7 +- .../script/ScriptContextTests.java | 14 ++-- .../script/ScriptModesTests.java | 2 +- .../script/ScriptServiceTests.java | 44 +++++------ .../aggregations/bucket/DateHistogramIT.java | 4 +- .../search/aggregations/metrics/AvgIT.java | 6 +- .../search/aggregations/metrics/SumIT.java | 6 +- .../aggregations/metrics/ValueCountIT.java | 4 +- .../org/elasticsearch/update/UpdateIT.java | 8 +- .../ExpressionScriptEngineService.java | 2 +- .../script/expression/ExpressionTests.java | 10 +-- .../groovy/GroovyScriptEngineService.java | 2 +- .../script/groovy/GroovySecurityTests.java | 6 +- .../mustache/MustacheScriptEngineService.java | 2 +- .../mustache/MustacheScriptEngineTests.java | 5 +- .../JavaScriptScriptEngineService.java | 6 +- .../JavaScriptScriptEngineTests.java | 25 +++--- .../JavaScriptScriptMultiThreadedTests.java | 7 +- .../javascript/JavaScriptSecurityTests.java | 13 ++-- .../script/javascript/SimpleBench.java | 3 +- .../plan/a/PlanAScriptEngineService.java | 29 +++++-- .../plan/a/FloatOverflowDisabledTests.java | 11 ++- .../plan/a/FloatOverflowEnabledTests.java | 11 ++- .../plan/a/IntegerOverflowDisabledTests.java | 11 ++- .../plan/a/IntegerOverflowEnabledTests.java | 11 ++- .../plan/a/ScriptEngineTests.java | 5 +- .../elasticsearch/plan/a/ScriptTestCase.java | 17 ++-- .../plan/a/WhenThingsGoWrongTests.java | 11 +++ .../python/PythonScriptEngineService.java | 6 +- .../python/PythonScriptEngineTests.java | 19 ++--- .../PythonScriptMultiThreadedTests.java | 5 +- .../script/python/PythonSecurityTests.java | 15 ++-- .../script/python/SimpleBench.java | 3 +- .../script/MockScriptEngine.java | 2 +- 53 files changed, 280 insertions(+), 197 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java index 5fe8297a6ba..f2bfb18c43f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java @@ -33,6 +33,8 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Collections; + public class TransportRenderSearchTemplateAction extends HandledTransportAction { private final ScriptService scriptService; @@ -55,7 +57,7 @@ public class 
TransportRenderSearchTemplateAction extends HandledTransportAction< @Override protected void doRun() throws Exception { - ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request); + ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request, Collections.emptyMap()); BytesReference processedTemplate = (BytesReference) executable.run(); RenderSearchTemplateResponse response = new RenderSearchTemplateResponse(); response.source(processedTemplate); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 9f8b2a2e7be..d28ba2986e2 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -245,7 +246,7 @@ public class UpdateHelper extends AbstractComponent { private Map executeScript(UpdateRequest request, Map ctx) { try { if (scriptService != null) { - ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request, Collections.emptyMap()); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 65dfb559e3f..faf482ead94 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -63,6 +63,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -364,7 +365,7 @@ public class QueryShardContext { * Executes the given template, and returns the response. 
*/ public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) { - ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext); + ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext, Collections.emptyMap()); return (BytesReference) executable.run(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index f69ac8c0548..6f14f15d3f2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -33,6 +33,7 @@ import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.util.Collections; import java.util.Objects; public class ScriptQueryBuilder extends AbstractQueryBuilder { @@ -80,7 +81,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { this.script = script; - this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); + this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); } @Override @@ -161,4 +162,4 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder protected boolean doEquals(ScriptQueryBuilder other) { return Objects.equals(script, other.script); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java index 92308466312..5fcd70b65dc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java @@ -33,6 +33,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import java.io.IOException; +import java.util.Collections; import java.util.Objects; /** @@ -89,10 +90,10 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder params) { NativeScriptFactory scriptFactory = scripts.get(script); if (scriptFactory != null) { return scriptFactory; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java index 993c95ad797..41befc9406f 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java @@ -36,7 +36,7 @@ public interface ScriptEngineService extends Closeable { boolean sandboxed(); - Object compile(String script); + Object compile(String script, Map params); ExecutableScript executable(CompiledScript compiledScript, @Nullable Map vars); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 3b91f2d3110..c9e9f9a873d 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -67,6 +67,7 @@ import java.io.InputStreamReader; import 
java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -96,9 +97,9 @@ public class ScriptService extends AbstractComponent implements Closeable { private final Map scriptEnginesByLang; private final Map scriptEnginesByExt; - private final ConcurrentMap staticCache = ConcurrentCollections.newConcurrentMap(); + private final ConcurrentMap staticCache = ConcurrentCollections.newConcurrentMap(); - private final Cache cache; + private final Cache cache; private final Path scriptsDirectory; private final ScriptModes scriptModes; @@ -153,7 +154,7 @@ public class ScriptService extends AbstractComponent implements Closeable { this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, DEFAULT_LANG); - CacheBuilder cacheBuilder = CacheBuilder.builder(); + CacheBuilder cacheBuilder = CacheBuilder.builder(); if (cacheMaxSize >= 0) { cacheBuilder.setMaximumWeight(cacheMaxSize); } @@ -224,7 +225,7 @@ public class ScriptService extends AbstractComponent implements Closeable { /** * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script. */ - public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) { + public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } @@ -252,14 +253,14 @@ public class ScriptService extends AbstractComponent implements Closeable { " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported"); } - return compileInternal(script, headersContext); + return compileInternal(script, headersContext, params); } /** * Compiles a script straight-away, or returns the previously compiled and cached script, * without checking if it can be executed based on settings. */ - public CompiledScript compileInternal(Script script, HasContextAndHeaders context) { + public CompiledScript compileInternal(Script script, HasContextAndHeaders context, Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } @@ -277,7 +278,7 @@ public class ScriptService extends AbstractComponent implements Closeable { ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang); if (type == ScriptType.FILE) { - String cacheKey = getCacheKey(scriptEngineService, name, null); + CacheKey cacheKey = new CacheKey(scriptEngineService, name, null, params); //On disk scripts will be loaded into the staticCache by the listener CompiledScript compiledScript = staticCache.get(cacheKey); @@ -299,14 +300,14 @@ public class ScriptService extends AbstractComponent implements Closeable { code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context); } - String cacheKey = getCacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code); + CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? 
null : name, code, params);

         CompiledScript compiledScript = cache.get(cacheKey);

         if (compiledScript == null) {
             //Either an un-cached inline script or indexed script
             //If the script type is inline the name will be the same as the code for identification in exceptions
             try {
-                compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code));
+                compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code, params));
             } catch (Exception exception) {
                 throw new ScriptException("Failed to compile " + type + " script [" + name + "] using lang [" + lang + "]", exception);
             }
@@ -364,7 +365,7 @@
             //we don't know yet what the script will be used for, but if all of the operations for this lang with
             //indexed scripts are disabled, it makes no sense to even compile it.
             if (isAnyScriptContextEnabled(scriptLang, scriptEngineService, ScriptType.INDEXED)) {
-                Object compiled = scriptEngineService.compile(template.getScript());
+                Object compiled = scriptEngineService.compile(template.getScript(), Collections.emptyMap());
                 if (compiled == null) {
                     throw new IllegalArgumentException("Unable to parse [" + template.getScript() + "] lang [" + scriptLang +
                             "] (ScriptService.compile returned null)");
@@ -436,8 +437,8 @@
     /**
      * Compiles (or retrieves from cache) and executes the provided script
      */
-    public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
-        return executable(compile(script, scriptContext, headersContext), script.getParams());
+    public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
+        return executable(compile(script, scriptContext, headersContext, params), script.getParams());
     }

     /**
@@ -450,8 +451,8 @@
     /**
      * Compiles (or retrieves from cache) and executes the provided search script
      */
-    public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext) {
-        CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current());
+    public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map<String, String> params) {
+        CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current(), params);
         return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams());
     }

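Because the compile-time params now travel into ScriptEngineService.compile(...) and,
below, into the cache key, the same script source compiled with different params ends
up as distinct cache entries. A hedged caller sketch: the "picky" option is a made-up
engine parameter, and "test" is the mock lang used by the tests later in this patch;
the four-argument compile signature is the one added here.

    import java.util.Collections;
    import java.util.Map;

    import org.elasticsearch.script.CompiledScript;
    import org.elasticsearch.script.Script;
    import org.elasticsearch.script.ScriptContext;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.script.ScriptService.ScriptType;
    import org.elasticsearch.search.internal.SearchContext;

    CompiledScript compileWithOption(ScriptService scriptService) {
        Map<String, String> compileParams = Collections.singletonMap("picky", "true"); // hypothetical option
        // same source with different compileParams maps to a different CacheKey,
        // so it is compiled and cached separately
        return scriptService.compile(new Script("1 + 1", ScriptType.INLINE, "test", null),
                ScriptContext.Standard.SEARCH, SearchContext.current(), compileParams);
    }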
@@ -491,9 +492,9 @@
      * {@code ScriptEngineService}'s {@code scriptRemoved} method when the
      * script has been removed from the cache
      */
-    private class ScriptCacheRemovalListener implements RemovalListener<String, CompiledScript> {
+    private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
         @Override
-        public void onRemoval(RemovalNotification<String, CompiledScript> notification) {
+        public void onRemoval(RemovalNotification<CacheKey, CompiledScript> notification) {
             scriptMetrics.onCacheEviction();
             for (ScriptEngineService service : scriptEngines) {
                 try {
@@ -539,8 +540,8 @@
                     logger.info("compiling script file [{}]", file.toAbsolutePath());
                     try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
                         String script = Streams.copyToString(reader);
-                        String cacheKey = getCacheKey(engineService, scriptNameExt.v1(), null);
-                        staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script)));
+                        CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap());
+                        staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script, Collections.emptyMap())));
                         scriptMetrics.onCompilation();
                     }
                 } else {
@@ -565,7 +566,7 @@
                 ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
                 assert engineService != null;
                 logger.info("removing script file [{}]", file.toAbsolutePath());
-                staticCache.remove(getCacheKey(engineService, scriptNameExt.v1(), null));
+                staticCache.remove(new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()));
             }
         }

@@ -625,11 +626,44 @@
         }
     }

-    private static String getCacheKey(ScriptEngineService scriptEngineService, String name, String code) {
-        String lang = scriptEngineService.types()[0];
-        return lang + ":" + (name != null ? ":" + name : "") + (code != null ? ":" + code : "");
+    private static final class CacheKey {
+        final String lang;
+        final String name;
+        final String code;
+        final Map<String, String> params;
+
+        private CacheKey(final ScriptEngineService service, final String name, final String code, final Map<String, String> params) {
+            this.lang = service.types()[0];
+            this.name = name;
+            this.code = code;
+            this.params = params;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            CacheKey cacheKey = (CacheKey)o;
+
+            if (!lang.equals(cacheKey.lang)) return false;
+            if (name != null ? !name.equals(cacheKey.name) : cacheKey.name != null) return false;
+            if (code != null ? !code.equals(cacheKey.code) : cacheKey.code != null) return false;
+            return params.equals(cacheKey.params);
+
+        }
+
+        @Override
+        public int hashCode() {
+            int result = lang.hashCode();
+            result = 31 * result + (name != null ? name.hashCode() : 0);
+            result = 31 * result + (code != null ?
code.hashCode() : 0); + result = 31 * result + params.hashCode(); + return result; + } } + private static class IndexedScript { private final String lang; private final String id; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index eb8414bb32e..00939a74cf8 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -91,6 +91,7 @@ import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -560,7 +561,7 @@ public class SearchService extends AbstractLifecycleComponent imp context.scrollContext().scroll = request.scroll(); } if (request.template() != null) { - ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context); + ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context, Collections.emptyMap()); BytesReference run = (BytesReference) executable.run(); try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) { QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry()); @@ -835,7 +836,7 @@ public class SearchService extends AbstractLifecycleComponent imp } if (source.scriptFields() != null) { for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) { - SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH); + SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH, Collections.emptyMap()); context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure())); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index 046ca717b9f..b6d1d56d07b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -82,7 +83,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public void initialize(InternalAggregation.ReduceContext context) { - searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context); + searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap()); searchScript.setNextVar("_subset_freq", subsetDfHolder); searchScript.setNextVar("_subset_size", subsetSizeHolder); searchScript.setNextVar("_superset_freq", supersetDfHolder); @@ -170,7 +171,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { } ExecutableScript searchScript; try { - searchScript = scriptService.executable(script, 
ScriptContext.Standard.AGGS, context); + searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap()); } catch (Exception e) { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index d39a0335ac3..00c6b6b49bb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -91,7 +92,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement vars.putAll(firstAggregation.reduceScript.getParams()); } CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript, - ScriptContext.Standard.AGGS, reduceContext); + ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars); aggregation = script.run(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index 2c1caaa5241..6603c6289b2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -58,11 +59,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator { this.params = params; ScriptService scriptService = context.searchContext().scriptService(); if (initScript != null) { - scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext()).run(); + scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()).run(); } - this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS); + this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS, Collections.emptyMap()); if (combineScript != null) { - this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext()); + this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()); } else { this.combineScript = null; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 789f8c961a3..e5ccbf6971a 
100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -90,7 +90,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java index 669a223b215..edc3b4e87ce 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -88,7 +89,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java index 506c9d16d7c..a9dcc77ee9f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java @@ -43,6 +43,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -227,7 +228,7 @@ public class ValuesSourceParser { } private SearchScript createScript() { - return input.script == null ? null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS); + return input.script == null ? 
null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS, Collections.emptyMap()); } private static ValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java index 6dbdcbd589a..de1703b5c98 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -97,9 +98,9 @@ public class ScriptFieldsParseElement implements SearchParseElement { throw new SearchParseException(context, "must specify a script in script fields", parser.getTokenLocation()); } - SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); + SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript, ignoreException)); } } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index c465eaf6efb..e4fe2c08f75 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -130,7 +131,7 @@ public class ScriptSortParser implements SortParser { if (type == null) { throw new SearchParseException(context, "_script sorting requires setting the type of the script", parser.getTokenLocation()); } - final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); + final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) { throw new SearchParseException(context, "type [string] doesn't support mode [" + sortMode + "]", parser.getTokenLocation()); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index 4bbdaf9c49e..9b083a91788 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import java.io.IOException; +import java.util.Collections; public final class PhraseSuggestParser implements SuggestContextParser { @@ -143,7 +144,7 @@ 
public final class PhraseSuggestParser implements SuggestContextParser { } Template template = Template.parse(parser, parseFieldMatcher); CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, - headersContext); + headersContext, Collections.emptyMap()); suggestion.setCollateQueryScript(compiledScript); } else if ("params".equals(fieldName)) { suggestion.setCollateScriptParams(parser.map()); diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index fc888c79a8c..987aef90bc3 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -54,7 +54,7 @@ public class FileScriptTests extends ESTestCase { .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false).build(); ScriptService scriptService = makeScriptService(settings); Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders)); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); } public void testAllOpsDisabled() throws Exception { @@ -68,7 +68,7 @@ public class FileScriptTests extends ESTestCase { Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); for (ScriptContext context : ScriptContext.Standard.values()) { try { - scriptService.compile(script, context, contextAndHeaders); + scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); fail(context.getKey() + " script should have been rejected"); } catch(Exception e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled")); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 02fad319846..47adeabe02f 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -62,7 +63,7 @@ public class NativeScriptTests extends ESTestCase { ScriptService scriptService = injector.getInstance(ScriptService.class); ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), - ScriptContext.Standard.SEARCH, contextAndHeaders); + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); assertThat(executable.run().toString(), equalTo("test")); terminate(injector.getInstance(ThreadPool.class)); } @@ -88,7 +89,7 @@ public class NativeScriptTests extends ESTestCase { for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext, - contextAndHeaders), notNullValue()); + contextAndHeaders, Collections.emptyMap()), notNullValue()); } } @@ -110,4 +111,4 @@ public class NativeScriptTests extends ESTestCase { return "test"; } } -} \ No newline at 
end of file +} diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 0edaedbb28e..019eb7c74a0 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -58,7 +58,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); @@ -71,16 +71,16 @@ public class ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null); try { - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); } // still works for other script contexts - assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders)); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders)); - assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders)); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders, Collections.emptyMap())); } public void testUnknownPluginScriptContext() throws Exception { @@ -89,7 +89,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported")); @@ -109,7 +109,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new 
Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, context, contextAndHeaders); + scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported")); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 3e476d2bebb..0afd72dab2b 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -252,7 +252,7 @@ public class ScriptModesTests extends ESTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return null; } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index e81b0894eca..ab325e9e0c9 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -132,7 +132,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly processed"); CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, contextAndHeaders); + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); logger.info("--> delete both files"); @@ -143,7 +143,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly removed"); try { scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, - contextAndHeaders); + contextAndHeaders, Collections.emptyMap()); fail("the script test_script should no longer exist"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]")); @@ -154,9 +154,9 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -164,9 +164,9 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null), - 
randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -175,9 +175,9 @@ public class ScriptServiceTests extends ESTestCase { buildScriptService(Settings.EMPTY); createFileScripts("test"); CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -338,7 +338,7 @@ public class ScriptServiceTests extends ESTestCase { for (String type : scriptEngineService.types()) { try { scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), contextAndHeaders); + pluginName, unknownContext), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); @@ -349,20 +349,20 @@ public class ScriptServiceTests extends ESTestCase { public void testCompileCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testExecutableCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testSearchCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts)); + scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -372,7 +372,7 @@ public class ScriptServiceTests extends ESTestCase { int numberOfCompilations = randomIntBetween(1, 1024); for (int i = 0; i < numberOfCompilations; i++) { scriptService - .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); } assertEquals(numberOfCompilations, scriptService.stats().getCompilations()); } @@ -382,8 +382,8 
@@ public class ScriptServiceTests extends ESTestCase { Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -391,14 +391,14 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); createFileScripts("test"); - scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testIndexedScriptCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -407,8 +407,8 @@ public class ScriptServiceTests extends ESTestCase { Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); - scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(2L, scriptService.stats().getCompilations()); assertEquals(1L, scriptService.stats().getCacheEvictions()); } @@ -424,7 +424,7 @@ public class ScriptServiceTests extends ESTestCase { private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext, HasContextAndHeaders contextAndHeaders) { try { - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders); + scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()); fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch(ScriptException e) { //all good @@ -433,7 +433,7 @@ public class ScriptServiceTests extends ESTestCase { private void assertCompileAccepted(String lang, 
String script, ScriptType scriptType, ScriptContext scriptContext, HasContextAndHeaders contextAndHeaders) { - assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders), notNullValue()); + assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()), notNullValue()); } public static class TestEngineService implements ScriptEngineService { @@ -454,7 +454,7 @@ public class ScriptServiceTests extends ESTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return "compiled_" + script; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 9a1d498ad6b..97cd659a1d9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -1429,7 +1429,7 @@ public class DateHistogramIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -1555,7 +1555,7 @@ public class DateHistogramIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java index ac146706eb5..db02d6ccb03 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java @@ -364,7 +364,7 @@ public class AvgIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -500,7 +500,7 @@ public class AvgIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -585,4 +585,4 @@ public class AvgIT extends AbstractNumericTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index d87de000108..2c27bde57dc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -359,7 +359,7 @@ public class SumIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -497,7 +497,7 @@ public class SumIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -583,4 +583,4 @@ public class SumIT extends AbstractNumericTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index fde7256ad01..c4611546493 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -244,7 +244,7 @@ public class ValueCountIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -330,4 +330,4 @@ public class ValueCountIT extends ESIntegTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index a789bb48774..09887d83541 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -120,7 +120,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return new Object(); // unused } @@ -218,7 +218,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -309,7 +309,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return new Object(); // unused } @@ -400,7 +400,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return new Object(); // unused } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index a7f93925119..cf6017a32ca 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -95,7 +95,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { // classloader created here final SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java index b05b9630a14..198558381d3 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java @@ -33,23 +33,23 @@ public class ExpressionTests extends ESSingleNodeTestCase { public void testNeedsScores() { IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); - + ExpressionScriptEngineService service = new ExpressionScriptEngineService(Settings.EMPTY); SearchLookup lookup = new SearchLookup(index.mapperService(), index.fieldData(), null); - Object compiled = service.compile("1.2"); + Object compiled = service.compile("1.2", Collections.emptyMap()); SearchScript ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); 
assertFalse(ss.needsScores()); - compiled = service.compile("doc['d'].value"); + compiled = service.compile("doc['d'].value", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertFalse(ss.needsScores()); - compiled = service.compile("1/_score"); + compiled = service.compile("1/_score", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); - compiled = service.compile("doc['d'].value * _score"); + compiled = service.compile("doc['d'].value * _score", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); } diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 85f57694ce6..1ce5a2ab761 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -165,7 +165,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { try { // we reuse classloader, so do a security check just in case. SecurityManager sm = System.getSecurityManager(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 5f91631c021..8d9279ca0dd 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -99,7 +99,7 @@ public class GroovySecurityTests extends ESTestCase { // filtered directly by our classloader assertFailure("getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", PrivilegedActionException.class); // unfortunately, we have access to other classloaders (due to indy mechanism needing getClassLoader permission) - // but we can't do much with them directly at least. + // but we can't do much with them directly at least. assertFailure("myobject.getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", SecurityException.class); assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)", SecurityException.class); assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." 
+ @@ -133,9 +133,9 @@ public class GroovySecurityTests extends ESTestCase { vars.put("myarray", Arrays.asList("foo")); vars.put("myobject", new MyObject()); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script, Collections.emptyMap())), vars).run(); } - + public static class MyObject { public int getPrimitive() { return 0; } public Object getObject() { return "value"; } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index 93172056071..78fc6571f12 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -85,7 +85,7 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc * @return a compiled template object for later execution. * */ @Override - public Object compile(String template) { + public Object compile(String template, Map params) { /** Factory to generate Mustache objects from. */ return (new JsonEscapingMustacheFactory()).compile(new FastStringReader(template), "query-template"); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index ce29bf246be..8e8c8981493 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -28,6 +28,7 @@ import org.junit.Before; import java.io.IOException; import java.io.StringWriter; import java.nio.charset.Charset; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -52,7 +53,7 @@ public class MustacheScriptEngineTests extends ESTestCase { + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); @@ -63,7 +64,7 @@ public class MustacheScriptEngineTests extends ESTestCase { Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); vars.put("body_val", "\"quick brown\""); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": 
\"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 33a4e55801b..825a8d358d9 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -62,7 +62,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements // one time initialization of rhino security manager integration private static final CodeSource DOMAIN; private static final int OPTIMIZATION_LEVEL = 1; - + static { try { DOMAIN = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); @@ -110,7 +110,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements if (securityDomain != DOMAIN) { throw new SecurityException("illegal securityDomain: " + securityDomain); } - + return super.createClassLoader(parent, securityDomain); } }); @@ -157,7 +157,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { Context ctx = Context.enter(); try { return ctx.compileString(script, generateScriptName(), 1, DOMAIN); diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java index fe9cc324f1c..9d8357bb582 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java @@ -29,6 +29,7 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -54,7 +55,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { public void testSimpleEquation() { Map vars = new HashMap(); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "js", se.compile("1 + 2")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "js", se.compile("1 + 2", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(3)); } @@ -64,20 +65,20 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map obj2 = MapBuilder.newMapBuilder().put("prop2", "value2").map(); Map obj1 = MapBuilder.newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Arrays.asList("2", "1")).map(); vars.put("obj1", obj1); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1", Collections.emptyMap())), vars).run(); assertThat(o, instanceOf(Map.class)); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) 
obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1.l[0]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1.l[0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("2")); } public void testJavaScriptObjectToMap() { Map vars = new HashMap(); Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptObjectToMap", "js", - se.compile("var obj1 = {}; obj1.prop1 = 'value1'; obj1.obj2 = {}; obj1.obj2.prop2 = 'value2'; obj1")), vars).run(); + se.compile("var obj1 = {}; obj1.prop1 = 'value1'; obj1.obj2 = {}; obj1.obj2.prop2 = 'value2'; obj1", Collections.emptyMap())), vars).run(); Map obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); @@ -92,7 +93,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { vars.put("ctx", ctx); ExecutableScript executable = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptObjectMapInter", "js", - se.compile("ctx.obj2 = {}; ctx.obj2.prop2 = 'value2'; ctx.obj1.prop1 = 'uvalue1'")), vars); + se.compile("ctx.obj2 = {}; ctx.obj2.prop2 = 'value2'; ctx.obj1.prop1 = 'uvalue1'", Collections.emptyMap())), vars); executable.run(); ctx = (Map) executable.unwrap(vars.get("ctx")); assertThat(ctx.containsKey("obj1"), equalTo(true)); @@ -106,7 +107,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map doc = new HashMap(); ctx.put("doc", doc); - Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']"); + Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptInnerArrayCreation", "js", compiled), new HashMap()); script.setNextVar("ctx", ctx); @@ -124,21 +125,21 @@ public class JavaScriptScriptEngineTests extends ESTestCase { vars.put("l", Arrays.asList("1", "2", "3", obj1)); Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l.length")), vars).run(); + se.compile("l.length", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(4)); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[0]")), vars).run(); + se.compile("l[0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("1")); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[3]")), vars).run(); + se.compile("l[3]", Collections.emptyMap())), vars).run(); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[3].prop1")), vars).run(); + se.compile("l[3].prop1", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("value1")); } @@ -146,7 +147,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map vars = new HashMap(); Map ctx = new HashMap(); vars.put("ctx", ctx); - Object compiledScript = se.compile("ctx.value"); + Object compiledScript = se.compile("ctx.value", Collections.emptyMap()); 
ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "js", compiledScript), vars); @@ -161,7 +162,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap(); - Object compiledScript = se.compile("value"); + Object compiledScript = se.compile("value", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "js", compiledScript), vars); diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java index 2308e666c51..2aa6e13a99f 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -40,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; @@ -82,7 +83,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableWithRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; @@ -124,7 +125,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecute() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java index c6f9805f818..dccc36d1bf7 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.mozilla.javascript.EcmaError; import org.mozilla.javascript.WrappedException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,7 +34,7 @@ import java.util.Map; * 
Tests for the Javascript security permissions */ public class JavaScriptSecurityTests extends ESTestCase { - + private JavaScriptScriptEngineService se; @Override @@ -53,14 +54,14 @@ public class JavaScriptSecurityTests extends ESTestCase { /** runs a script */ private void doTest(String script) { Map vars = new HashMap(); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script, Collections.emptyMap())), vars).run(); } - + /** asserts that a script runs without exception */ private void assertSuccess(String script) { doTest(script); } - + /** assert that a security exception is hit */ private void assertFailure(String script, Class exceptionClass) { try { @@ -78,13 +79,13 @@ public class JavaScriptSecurityTests extends ESTestCase { } } } - + /** Test some javascripts that are ok */ public void testOK() { assertSuccess("1 + 2"); assertSuccess("Math.cos(Math.PI)"); } - + /** Test some javascripts that should hit security exception */ public void testNotOK() throws Exception { // sanity check :) diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java index bb7eb31c85d..3445c116057 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -35,7 +36,7 @@ public class SimpleBench { public static void main(String[] args) { JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y"); + Object compiled = se.compile("x + y", Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled); Map vars = new HashMap(); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java index 6b3cd834715..7795f74700b 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -28,6 +28,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; @@ -37,15 +38,16 @@ import java.security.AccessController; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; +import java.util.HashMap; import java.util.Map; public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { public static final String NAME = "plan-a"; - // TODO: this should really be per-script since scripts do so many different things? 
- private static final CompilerSettings compilerSettings = new CompilerSettings(); - - public static final String NUMERIC_OVERFLOW = "plan-a.numeric_overflow"; + // default settings, used unless otherwise specified + private static final CompilerSettings DEFAULT_COMPILER_SETTINGS = new CompilerSettings(); + + public static final String NUMERIC_OVERFLOW = "numeric_overflow"; // TODO: how should custom definitions be specified? private Definition definition = null; @@ -53,7 +55,6 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip @Inject public PlanAScriptEngineService(Settings settings) { super(settings); - compilerSettings.setNumericOverflow(settings.getAsBoolean(NUMERIC_OVERFLOW, compilerSettings.getNumericOverflow())); } public void setDefinition(final Definition definition) { @@ -86,7 +87,23 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { + final CompilerSettings compilerSettings; + if (params.isEmpty()) { + compilerSettings = DEFAULT_COMPILER_SETTINGS; + } else { + // custom settings + compilerSettings = new CompilerSettings(); + Map clone = new HashMap<>(params); + String value = clone.remove(NUMERIC_OVERFLOW); + if (value != null) { + // TODO: can we get a real boolean parser in here? + compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); + } + if (!clone.isEmpty()) { + throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + clone); + } + } // check we ourselves are not being called by unprivileged code SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java index 94beac0c58c..4603a669df2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests floating point overflow with numeric overflow disabled */ public class FloatOverflowDisabledTests extends ScriptTestCase { + /** wire overflow to false for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java index ff1c315628f..02a738de71e 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests floating point overflow with numeric overflow 
enabled */ public class FloatOverflowEnabledTests extends ScriptTestCase { + /** wire overflow to true for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java index 279ea0616d9..dbffb11f0d0 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests integer overflow with numeric overflow disabled */ public class IntegerOverflowDisabledTests extends ScriptTestCase { + /** wire overflow to false for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java index 8abd2695915..cdab0e89fe6 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests integer overflow with numeric overflow enabled */ public class IntegerOverflowEnabledTests extends ScriptTestCase { + /** wire overflow to true for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java index d2bbe02a625..e5084392f99 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -78,7
+79,7 @@ public class ScriptEngineTests extends ScriptTestCase { Map ctx = new HashMap<>(); vars.put("ctx", ctx); - Object compiledScript = scriptEngine.compile("return ((Map)input.get(\"ctx\")).get(\"value\");"); + Object compiledScript = scriptEngine.compile("return ((Map)input.get(\"ctx\")).get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "plan-a", compiledScript), vars); @@ -93,7 +94,7 @@ public class ScriptEngineTests extends ScriptTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap<>(); - Object compiledScript = scriptEngine.compile("return input.get(\"value\");"); + Object compiledScript = scriptEngine.compile("return input.get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "plan-a", compiledScript), vars); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java index 253e37183f3..5b4948036f3 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Collections; import java.util.Map; /** @@ -34,17 +35,10 @@ import java.util.Map; */ public abstract class ScriptTestCase extends ESTestCase { protected PlanAScriptEngineService scriptEngine; - - /** Override to provide different compiler settings */ - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, random().nextBoolean()); - return builder.build(); - } @Before public void setup() { - scriptEngine = new PlanAScriptEngineService(getSettings()); + scriptEngine = new PlanAScriptEngineService(Settings.EMPTY); } /** Compiles and returns the result of {@code script} */ @@ -54,7 +48,12 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map vars) { - Object object = scriptEngine.compile(script); + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); + } + + /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ + public Object exec(String script, Map vars, Map compileParams) { + Object object = scriptEngine.compile(script, compileParams); CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "plan-a", object); return scriptEngine.executable(compiled, vars).run(); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java index de2c1c9ea3e..277778e7e76 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.plan.a; +import java.util.Collections; + public class 
WhenThingsGoWrongTests extends ScriptTestCase { public void testNullPointer() { try { @@ -38,4 +40,13 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { fail("should have hit cce"); } catch (ClassCastException expected) {} } + + public void testBogusParameter() { + try { + exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue")); + fail("should have hit IAE"); + } catch (IllegalArgumentException expected) { + assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); + } + } } diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index 1930f530671..c4f109cc782 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -60,7 +60,7 @@ import org.python.util.PythonInterpreter; public class PythonScriptEngineService extends AbstractComponent implements ScriptEngineService { private final PythonInterpreter interp; - + @Inject public PythonScriptEngineService(Settings settings) { super(settings); @@ -110,7 +110,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { // classloader created here SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -293,7 +293,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri if (value == null) { return null; } else if (value instanceof PyObject) { - // seems like this is enough, inner PyDictionary will do the conversion for us for example, so expose it directly + // seems like this is enough, inner PyDictionary will do the conversion for us for example, so expose it directly return ((PyObject) value).__tojava__(Object.class); } return value; diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java index e713bd67c92..a0bfab43c54 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java @@ -29,6 +29,7 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -53,7 +54,7 @@ public class PythonScriptEngineTests extends ESTestCase { public void testSimpleEquation() { Map vars = new HashMap(); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "python", se.compile("1 + 2")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "python", se.compile("1 + 2", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(3)); } @@ -63,13 +64,13 @@ public class PythonScriptEngineTests extends ESTestCase { Map obj2 = MapBuilder.newMapBuilder().put("prop2", "value2").map(); Map obj1 = MapBuilder.newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Arrays.asList("2", "1")).map(); vars.put("obj1", obj1); - Object o = se.executable(new 
CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1", Collections.emptyMap())), vars).run(); assertThat(o, instanceOf(Map.class)); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1['l'][0]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1['l'][0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("2")); } @@ -82,7 +83,7 @@ public class PythonScriptEngineTests extends ESTestCase { vars.put("ctx", ctx); ExecutableScript executable = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testObjectInterMap", "python", - se.compile("ctx['obj2'] = { 'prop2' : 'value2' }; ctx['obj1']['prop1'] = 'uvalue1'")), vars); + se.compile("ctx['obj2'] = { 'prop2' : 'value2' }; ctx['obj1']['prop1'] = 'uvalue1'", Collections.emptyMap())), vars); executable.run(); ctx = (Map) executable.unwrap(vars.get("ctx")); assertThat(ctx.containsKey("obj1"), equalTo(true)); @@ -100,15 +101,15 @@ public class PythonScriptEngineTests extends ESTestCase { // Object o = se.execute(se.compile("l.length"), vars); // assertThat(((Number) o).intValue(), equalTo(4)); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[0]")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("1")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]", Collections.emptyMap())), vars).run(); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]['prop1']")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]['prop1']", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("value1")); } @@ -116,7 +117,7 @@ public class PythonScriptEngineTests extends ESTestCase { Map vars = new HashMap(); Map ctx = new HashMap(); vars.put("ctx", ctx); - Object compiledScript = se.compile("ctx['value']"); + Object compiledScript = se.compile("ctx['value']", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "python", compiledScript), vars); ctx.put("value", 1); @@ -131,7 +132,7 @@ public class PythonScriptEngineTests extends ESTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap(); Map ctx = new HashMap(); - Object compiledScript = se.compile("value"); + Object compiledScript = se.compile("value", Collections.emptyMap()); ExecutableScript script = 
se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "python", compiledScript), vars); script.setNextVar("value", 1); diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java index 7b9663f6b6a..06d3da03ab8 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -41,7 +42,7 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "python", compiled); final AtomicBoolean failed = new AtomicBoolean(); @@ -127,7 +128,7 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { public void testExecute() throws Exception { final PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecute", "python", compiled); final AtomicBoolean failed = new AtomicBoolean(); diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java index e90ac503f13..22471129e82 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.python.core.PyException; import java.text.DecimalFormatSymbols; +import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -34,7 +35,7 @@ import java.util.Map; * Tests for Python security permissions */ public class PythonSecurityTests extends ESTestCase { - + private PythonScriptEngineService se; @Override @@ -54,14 +55,14 @@ public class PythonSecurityTests extends ESTestCase { /** runs a script */ private void doTest(String script) { Map vars = new HashMap(); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "python", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "python", se.compile(script, Collections.emptyMap())), vars).run(); } - + /** asserts that a script runs without exception */ private void assertSuccess(String script) { doTest(script); } - + /** assert that a security exception is hit */ private void assertFailure(String script) { try { @@ -76,13 +77,13 @@ public class 
PythonSecurityTests extends ESTestCase { } } } - + /** Test some py scripts that are ok */ public void testOK() { assertSuccess("1 + 2"); assertSuccess("from java.lang import Math\nMath.cos(0)"); } - + /** Test some py scripts that should hit security exception */ public void testNotOK() { // sanity check :) @@ -93,7 +94,7 @@ public class PythonSecurityTests extends ESTestCase { // no files assertFailure("from java.io import File\nFile.createTempFile(\"test\", \"tmp\")"); } - + /** Test again from a new thread, python has complex threadlocal configuration */ public void testNotOKFromSeparateThread() throws Exception { Thread t = new Thread() { diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java index 60e792c34b5..d9559aef16c 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -35,7 +36,7 @@ public class SimpleBench { public static void main(String[] args) { PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y"); + Object compiled = se.compile("x + y", Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "SimpleBench", "python", compiled); diff --git a/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index bfd40900456..aec90aa50d4 100644 --- a/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -72,7 +72,7 @@ public class MockScriptEngine implements ScriptEngineService { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; }

From 6692e42d9ae944e3ebdd3ef96c4ef40dbbc6b87f Mon Sep 17 00:00:00 2001
From: Robert Muir
Date: Thu, 17 Dec 2015 02:35:00 -0500
Subject: [PATCH 085/322] thirdPartyAudit round 2

This fixes the `lenient` parameter to be `missingClasses`. I will remove this boolean and we can handle missing classes via the normal whitelist.

It also adds a check for sheisty classes (jar hell with the jdk). This is inspired by the lucene "sheisty" classes check, but that check has false positives. This check is more evil: it validates every class file against the extension classloader as a resource, to see if it exists there. If so: jar hell.

This jar hell is a problem for several reasons:
1. causes insanely-hard-to-debug problems (like bugs in forbidden-apis)
2. hides problems (like internal api access)
3. the code you think is executing is not really executing
4. security permissions are not what you think they are
5. brings in unnecessary dependencies
6. it's jar hell

The more difficult problems are stuff like jython, where these classes are simply 'uberjarred' directly in, so you can't just fix them by removing a bogus dependency. And there is a legit reason for them to do that: they want to support java 1.4.
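(For reference, a minimal standalone Java sketch of the sheisty-classes check this message describes; it is illustrative, not the Groovy task code from the patch below. The class and method names are invented, and it assumes the Java 8 loader hierarchy where the system classloader's parent is the extension loader; on Java 9+ the lookup would need the jrt filesystem instead, as the TODO in the task notes.)

    import java.io.IOException;
    import java.nio.file.FileSystems;
    import java.nio.file.FileVisitResult;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.SimpleFileVisitor;
    import java.nio.file.attribute.BasicFileAttributes;
    import java.util.Set;
    import java.util.TreeSet;

    public class SheistyClassCheck {
        /** returns class files under root that the extension classloader can also resolve */
        public static Set<String> findSheistyClasses(final Path root) throws IOException {
            // system.parent == extension loader, which sits above all user code
            final ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
            final Set<String> sheisty = new TreeSet<>();
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                    // resource names always use '/', even on windows
                    String entry = root.relativize(file).toString()
                            .replace(FileSystems.getDefault().getSeparator(), "/");
                    if (entry.endsWith(".class") && ext.getResource(entry) != null) {
                        // the jdk ships the same class file: jar hell
                        sheisty.add(entry);
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
            return sheisty;
        }
    }

Any entry this returns is resolved by the jdk's own loaders before the audited jar is ever consulted, which is exactly failure mode 3 above: the code you think is executing is not really executing.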
--- .../precommit/ThirdPartyAuditTask.groovy | 71 +++- core/build.gradle | 2 +- modules/lang-expression/build.gradle | 2 +- modules/lang-groovy/build.gradle | 2 +- plugins/discovery-azure/build.gradle | 11 +- .../licenses/activation-1.1.jar.sha1 | 1 - .../licenses/activation-LICENSE.txt | 119 ------ .../licenses/activation-NOTICE.txt | 1 - .../licenses/jaxb-api-2.2.2.jar.sha1 | 1 - .../discovery-azure/licenses/stax-LICENSE.txt | 202 ----------- .../discovery-azure/licenses/stax-NOTICE.txt | 1 - .../licenses/stax-api-1.0-2.jar.sha1 | 1 - plugins/discovery-ec2/build.gradle | 2 +- plugins/discovery-gce/build.gradle | 2 +- plugins/lang-plan-a/build.gradle | 2 +- plugins/lang-python/build.gradle | 342 +++++++++++++++++- plugins/mapper-attachments/build.gradle | 3 +- .../licenses/stax-api-1.0.1.jar.sha1 | 1 - .../licenses/stax-api-LICENSE.txt | 202 ----------- .../licenses/stax-api-NOTICE.txt | 0 plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-s3/build.gradle | 2 +- qa/evil-tests/build.gradle | 2 +- test-framework/build.gradle | 2 +- 24 files changed, 416 insertions(+), 560 deletions(-) delete mode 100644 plugins/discovery-azure/licenses/activation-1.1.jar.sha1 delete mode 100644 plugins/discovery-azure/licenses/activation-LICENSE.txt delete mode 100644 plugins/discovery-azure/licenses/activation-NOTICE.txt delete mode 100644 plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 delete mode 100644 plugins/discovery-azure/licenses/stax-LICENSE.txt delete mode 100644 plugins/discovery-azure/licenses/stax-NOTICE.txt delete mode 100644 plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 delete mode 100644 plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 delete mode 100644 plugins/mapper-attachments/licenses/stax-api-LICENSE.txt delete mode 100644 plugins/mapper-attachments/licenses/stax-api-NOTICE.txt diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 86ac767d06c..83fe9115083 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -18,6 +18,12 @@ */ package org.elasticsearch.gradle.precommit +import java.nio.file.Files +import java.nio.file.FileVisitResult +import java.nio.file.Path +import java.nio.file.SimpleFileVisitor +import java.nio.file.attribute.BasicFileAttributes + import org.gradle.api.DefaultTask import org.gradle.api.artifacts.UnknownConfigurationException import org.gradle.api.file.FileCollection @@ -32,7 +38,7 @@ import org.apache.tools.ant.Project public class ThirdPartyAuditTask extends DefaultTask { // true to be lenient about MISSING CLASSES - private boolean lenient; + private boolean missingClasses; // patterns for classes to exclude, because we understand their issues private String[] excludes = new String[0]; @@ -43,19 +49,19 @@ public class ThirdPartyAuditTask extends DefaultTask { } /** - * Set to true to be lenient with dependencies. By default this check will fail if it finds + * Set to true to be lenient with missing classes. By default this check will fail if it finds * MISSING CLASSES. This means the set of jars is incomplete. However, in some cases * this can be due to intentional exclusions that are well-tested and understood. 
*/ - public void setLenient(boolean value) { - lenient = value; + public void setMissingClasses(boolean value) { + missingClasses = value; } /** * Returns true if leniency about missing classes is enabled. */ - public boolean isLenient() { - return lenient; + public boolean isMissingClasses() { + return missingClasses; } /** @@ -118,9 +124,10 @@ public class ThirdPartyAuditTask extends DefaultTask { } logger.error("[thirdPartyAudit] Scanning: " + names) - // warn that you won't see any forbidden apis warnings - if (lenient) { - logger.warn("[thirdPartyAudit] WARNING: leniency is enabled, will not fail if classes are missing!") + // warn that classes are missing + // TODO: move these to excludes list! + if (missingClasses) { + logger.warn("[thirdPartyAudit] WARNING: CLASSES ARE MISSING! Expect NoClassDefFoundError in bug reports from users!") } // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, @@ -135,7 +142,7 @@ public class ThirdPartyAuditTask extends DefaultTask { for (File jar : jars) { ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()) } - + // convert exclusion class names to binary file names String[] excludedFiles = new String[excludes.length]; for (int i = 0; i < excludes.length; i++) { @@ -146,13 +153,55 @@ public class ThirdPartyAuditTask extends DefaultTask { } } + // jarHellReprise + checkSheistyClasses(tmpDir.toPath(), new HashSet<>(Arrays.asList(excludedFiles))); + ant.thirdPartyAudit(internalRuntimeForbidden: true, failOnUnsupportedJava: false, - failOnMissingClasses: !lenient, + failOnMissingClasses: !missingClasses, classpath: project.configurations.testCompile.asPath) { fileset(dir: tmpDir, excludes: excludedFiles.join(',')) } // clean up our mess (if we succeed) ant.delete(dir: tmpDir.getAbsolutePath()) } + + /** + * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk! + */ + private void checkSheistyClasses(Path root, Set excluded) { + // system.parent = extensions loader. + // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!). + // but groovy/gradle needs to work at all first! + ClassLoader ext = ClassLoader.getSystemClassLoader().getParent() + assert ext != null + + Set sheistySet = new TreeSet<>(); + Files.walkFileTree(root, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String entry = root.relativize(file).toString() + if (entry.endsWith(".class")) { + if (ext.getResource(entry) != null) { + sheistySet.add(entry); + } + } + return FileVisitResult.CONTINUE; + } + }); + + // check if we are ok + if (sheistySet.isEmpty()) { + return; + } + + // leniency against exclusions list + sheistySet.removeAll(excluded); + + if (sheistySet.isEmpty()) { + logger.warn("[thirdPartyAudit] WARNING: JAR HELL WITH JDK! Expect insanely hard-to-debug problems!") + } else { + throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet); + } + } } diff --git a/core/build.gradle b/core/build.gradle index 302757ccc8b..f4eb2c0aebd 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -112,7 +112,7 @@ forbiddenPatterns { } // classes are missing, e.g. org.jboss.marshalling.Marshaller -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true // uses internal sun ssl classes! 
thirdPartyAudit.excludes = [ // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index aac94570a36..5563fdafe36 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -35,7 +35,7 @@ dependencyLicenses { // do we or do we not depend on asm-tree, that is the question // classes are missing, e.g. org.objectweb.asm.tree.LabelNode -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true compileJava.options.compilerArgs << '-Xlint:-rawtypes' compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 76686a760a2..7ffb5626d4a 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -37,7 +37,7 @@ integTest { } // classes are missing, e.g. jline.console.completer.Completer -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ // uses internal java api: sun.misc.Unsafe 'groovy.json.internal.FastStringUtils', diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 6f21364ea6d..ce80a441760 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -37,15 +37,12 @@ dependencies { compile "commons-lang:commons-lang:2.6" compile "commons-io:commons-io:2.4" compile 'javax.mail:mail:1.4.5' - compile 'javax.activation:activation:1.1' compile 'javax.inject:javax.inject:1' compile "com.sun.jersey:jersey-client:${versions.jersey}" compile "com.sun.jersey:jersey-core:${versions.jersey}" compile "com.sun.jersey:jersey-json:${versions.jersey}" compile 'org.codehaus.jettison:jettison:1.1' compile 'com.sun.xml.bind:jaxb-impl:2.2.3-1' - compile 'javax.xml.bind:jaxb-api:2.2.2' - compile 'javax.xml.stream:stax-api:1.0-2' compile 'org.codehaus.jackson:jackson-core-asl:1.9.2' compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2' compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2' @@ -57,7 +54,6 @@ dependencyLicenses { mapping from: /jackson-.*/, to: 'jackson' mapping from: /jersey-.*/, to: 'jersey' mapping from: /jaxb-.*/, to: 'jaxb' - mapping from: /stax-.*/, to: 'stax' } compileJava.options.compilerArgs << '-Xlint:-path,-serial,-static,-unchecked' @@ -67,9 +63,10 @@ compileJava.options.compilerArgs << '-Xlint:-deprecation' compileTestJava.options.compilerArgs << '-Xlint:-static' // classes are missing, e.g. org.osgi.framework.BundleActivator -thirdPartyAudit.lenient = true -// WE ARE JAR HELLING WITH THE JDK AND THAT IS WHY THIS HAPPENS -// TODO: fix this!!!!!!!!!!! +thirdPartyAudit.missingClasses = true +// TODO: figure out what is happening and fix this!!!!!!!!!!! +// there might be still some undetected jar hell! 
+// we need to fix https://github.com/policeman-tools/forbidden-apis/issues/91 first thirdPartyAudit.excludes = [ // uses internal java api: com.sun.xml.fastinfoset.stax.StAXDocumentParser 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector', diff --git a/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 b/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 deleted file mode 100644 index c4ee8fa5eb8..00000000000 --- a/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e6cb541461c2834bdea3eb920f1884d1eb508b50 diff --git a/plugins/discovery-azure/licenses/activation-LICENSE.txt b/plugins/discovery-azure/licenses/activation-LICENSE.txt deleted file mode 100644 index 1154e0aeec5..00000000000 --- a/plugins/discovery-azure/licenses/activation-LICENSE.txt +++ /dev/null @@ -1,119 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 - -1. Definitions. - -1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications. - -1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. - -1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. - -1.4. Executable means the Covered Software in any form other than Source Code. - -1.5. Initial Developer means the individual or entity that first makes Original Software available under this License. - -1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. - -1.7. License means this document. - -1.8. Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. - -1.9. Modifications means the Source Code and Executable form of any of the following: - -A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; - -B. Any new file that contains any part of the Original Software or previous Modification; or - -C. Any new file that is contributed or otherwise made available under the terms of this License. - -1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License. - -1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. - -1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. - -1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. - -2. License Grants. - -2.1.
The Initial Developer Grant. -Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof). -(c) The licenses granted in Sectionsá2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License. -(d) Notwithstanding Sectioná2.1(b) above, no patent license is granted: (1)áfor code that You delete from the Original Software, or (2)áfor infringements caused by: (i)áthe modification of the Original Software, or (ii)áthe combination of the Original Software with other software or devices. - -2.2. Contributor Grant. -Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1)áModifications made by that Contributor (or portions thereof); and (2)áthe combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). -(c) The licenses granted in Sectionsá2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. -(d) Notwithstanding Sectioná2.2(b) above, no patent license is granted: (1)áfor any code that Contributor has deleted from the Contributor Version; (2)áfor infringements caused by: (i)áthird party modifications of Contributor Version, or (ii)áthe combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3)áunder Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. - -3. Distribution Obligations. - -3.1. Availability of Source Code. - -Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. 
You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. - -3.2. Modifications. - -The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. - -3.3. Required Notices. -You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. - -3.4. Application of Additional Terms. -You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. - -3.5. Distribution of Executable Versions. -You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. - -3.6. Larger Works. -You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. - -4. Versions of the License. - -4.1. New Versions. -Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. - -4.2. Effect of New Versions. - -You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. 
If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. -4.3. Modified Versions. - -When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a)árename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b)áotherwise make it clear that the license contains terms which differ from this License. - -5. DISCLAIMER OF WARRANTY. - -COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - -6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. - -6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sectionsá2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. - -6.3. In the event of termination under Sectionsá6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. 
- -UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - -The Covered Software is a commercial item, as that term is defined in 48áC.F.R.á2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. á252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48áC.F.R.á12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. - -9. MISCELLANEOUS. - -This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdictions conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - -As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. 
- -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) -The GlassFish code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. - - - diff --git a/plugins/discovery-azure/licenses/activation-NOTICE.txt b/plugins/discovery-azure/licenses/activation-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/discovery-azure/licenses/activation-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 deleted file mode 100644 index a145d47cec9..00000000000 --- a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aeb3021ca93dde265796d82015beecdcff95bf09 diff --git a/plugins/discovery-azure/licenses/stax-LICENSE.txt b/plugins/discovery-azure/licenses/stax-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/plugins/discovery-azure/licenses/stax-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/plugins/discovery-azure/licenses/stax-NOTICE.txt b/plugins/discovery-azure/licenses/stax-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/discovery-azure/licenses/stax-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 b/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 deleted file mode 100644 index fb00ad889b6..00000000000 --- a/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6337b0de8b25e53e81b922352fbea9f9f57ba0b diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 14767c6783b..355dbc55164 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -50,7 +50,7 @@ test { } // classes are missing, e.g. 
org.apache.avalon.framework.logger.Logger
-thirdPartyAudit.lenient = true
+thirdPartyAudit.missingClasses = true
 thirdPartyAudit.excludes = [
   // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl
   // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault
diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle
index 2ec479a3e80..b054e0f37b8 100644
--- a/plugins/discovery-gce/build.gradle
+++ b/plugins/discovery-gce/build.gradle
@@ -33,4 +33,4 @@ test {
 }
 
 // classes are missing, e.g. org.apache.log.Logger
-thirdPartyAudit.lenient = true
+thirdPartyAudit.missingClasses = true
diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle
index c23c3a30efc..5f0ddafcc97 100644
--- a/plugins/lang-plan-a/build.gradle
+++ b/plugins/lang-plan-a/build.gradle
@@ -34,7 +34,7 @@ compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes'
 compileTestJava.options.compilerArgs << '-Xlint:-unchecked'
 
 // classes are missing, e.g. org.objectweb.asm.tree.LabelNode
-thirdPartyAudit.lenient = true
+thirdPartyAudit.missingClasses = true
 
 // regeneration logic, comes in via ant right now
 // don't port it to gradle, it works fine.
diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle
index a7faedf6a78..1c33ad2d5ee 100644
--- a/plugins/lang-python/build.gradle
+++ b/plugins/lang-python/build.gradle
@@ -37,7 +37,7 @@ integTest {
 }
 
 // classes are missing, e.g. org.tukaani.xz.FilterOptions
-thirdPartyAudit.lenient = true
+thirdPartyAudit.missingClasses = true
 thirdPartyAudit.excludes = [
   // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name)
   'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator',
@@ -72,4 +72,344 @@ thirdPartyAudit.excludes = [
   'org.python.netty.util.internal.chmv8.ForkJoinPool$WorkQueue',
   'org.python.netty.util.internal.chmv8.ForkJoinTask',
   'org.python.netty.util.internal.chmv8.ForkJoinTask$1',
+
+  // "uberjarring" (but not shading) classes that have been in the JDK since 1.5
+  // nice job python.
+ 'javax.xml.XMLConstants', + 'javax.xml.datatype.DatatypeConfigurationException', + 'javax.xml.datatype.DatatypeConstants$1', + 'javax.xml.datatype.DatatypeConstants$Field', + 'javax.xml.datatype.DatatypeConstants', + 'javax.xml.datatype.DatatypeFactory', + 'javax.xml.datatype.Duration', + 'javax.xml.datatype.FactoryFinder', + 'javax.xml.datatype.SecuritySupport$1', + 'javax.xml.datatype.SecuritySupport$2', + 'javax.xml.datatype.SecuritySupport$3', + 'javax.xml.datatype.SecuritySupport$4', + 'javax.xml.datatype.SecuritySupport$5', + 'javax.xml.datatype.SecuritySupport', + 'javax.xml.datatype.XMLGregorianCalendar', + 'javax.xml.namespace.NamespaceContext', + 'javax.xml.namespace.QName$1', + 'javax.xml.namespace.QName', + 'javax.xml.parsers.DocumentBuilder', + 'javax.xml.parsers.DocumentBuilderFactory', + 'javax.xml.parsers.FactoryConfigurationError', + 'javax.xml.parsers.FactoryFinder', + 'javax.xml.parsers.ParserConfigurationException', + 'javax.xml.parsers.SAXParser', + 'javax.xml.parsers.SAXParserFactory', + 'javax.xml.parsers.SecuritySupport$1', + 'javax.xml.parsers.SecuritySupport$2', + 'javax.xml.parsers.SecuritySupport$3', + 'javax.xml.parsers.SecuritySupport$4', + 'javax.xml.parsers.SecuritySupport$5', + 'javax.xml.parsers.SecuritySupport', + 'javax.xml.stream.EventFilter', + 'javax.xml.stream.FactoryConfigurationError', + 'javax.xml.stream.FactoryFinder', + 'javax.xml.stream.Location', + 'javax.xml.stream.SecuritySupport$1', + 'javax.xml.stream.SecuritySupport$2', + 'javax.xml.stream.SecuritySupport$3', + 'javax.xml.stream.SecuritySupport$4', + 'javax.xml.stream.SecuritySupport$5', + 'javax.xml.stream.SecuritySupport', + 'javax.xml.stream.StreamFilter', + 'javax.xml.stream.XMLEventFactory', + 'javax.xml.stream.XMLEventReader', + 'javax.xml.stream.XMLEventWriter', + 'javax.xml.stream.XMLInputFactory', + 'javax.xml.stream.XMLOutputFactory', + 'javax.xml.stream.XMLReporter', + 'javax.xml.stream.XMLResolver', + 'javax.xml.stream.XMLStreamConstants', + 'javax.xml.stream.XMLStreamException', + 'javax.xml.stream.XMLStreamReader', + 'javax.xml.stream.XMLStreamWriter', + 'javax.xml.stream.events.Attribute', + 'javax.xml.stream.events.Characters', + 'javax.xml.stream.events.Comment', + 'javax.xml.stream.events.DTD', + 'javax.xml.stream.events.EndDocument', + 'javax.xml.stream.events.EndElement', + 'javax.xml.stream.events.EntityDeclaration', + 'javax.xml.stream.events.EntityReference', + 'javax.xml.stream.events.Namespace', + 'javax.xml.stream.events.NotationDeclaration', + 'javax.xml.stream.events.ProcessingInstruction', + 'javax.xml.stream.events.StartDocument', + 'javax.xml.stream.events.StartElement', + 'javax.xml.stream.events.XMLEvent', + 'javax.xml.stream.util.EventReaderDelegate', + 'javax.xml.stream.util.StreamReaderDelegate', + 'javax.xml.stream.util.XMLEventAllocator', + 'javax.xml.stream.util.XMLEventConsumer', + 'javax.xml.transform.ErrorListener', + 'javax.xml.transform.FactoryFinder', + 'javax.xml.transform.OutputKeys', + 'javax.xml.transform.Result', + 'javax.xml.transform.SecuritySupport$1', + 'javax.xml.transform.SecuritySupport$2', + 'javax.xml.transform.SecuritySupport$3', + 'javax.xml.transform.SecuritySupport$4', + 'javax.xml.transform.SecuritySupport$5', + 'javax.xml.transform.SecuritySupport', + 'javax.xml.transform.Source', + 'javax.xml.transform.SourceLocator', + 'javax.xml.transform.Templates', + 'javax.xml.transform.Transformer', + 'javax.xml.transform.TransformerConfigurationException', + 'javax.xml.transform.TransformerException', + 
'javax.xml.transform.TransformerFactory', + 'javax.xml.transform.TransformerFactoryConfigurationError', + 'javax.xml.transform.URIResolver', + 'javax.xml.transform.dom.DOMLocator', + 'javax.xml.transform.dom.DOMResult', + 'javax.xml.transform.dom.DOMSource', + 'javax.xml.transform.sax.SAXResult', + 'javax.xml.transform.sax.SAXSource', + 'javax.xml.transform.sax.SAXTransformerFactory', + 'javax.xml.transform.sax.TemplatesHandler', + 'javax.xml.transform.sax.TransformerHandler', + 'javax.xml.transform.stax.StAXResult', + 'javax.xml.transform.stax.StAXSource', + 'javax.xml.transform.stream.StreamResult', + 'javax.xml.transform.stream.StreamSource', + 'javax.xml.validation.Schema', + 'javax.xml.validation.SchemaFactory', + 'javax.xml.validation.SchemaFactoryFinder$1', + 'javax.xml.validation.SchemaFactoryFinder$2', + 'javax.xml.validation.SchemaFactoryFinder', + 'javax.xml.validation.SchemaFactoryLoader', + 'javax.xml.validation.SecuritySupport$1', + 'javax.xml.validation.SecuritySupport$2', + 'javax.xml.validation.SecuritySupport$3', + 'javax.xml.validation.SecuritySupport$4', + 'javax.xml.validation.SecuritySupport$5', + 'javax.xml.validation.SecuritySupport$6', + 'javax.xml.validation.SecuritySupport$7', + 'javax.xml.validation.SecuritySupport$8', + 'javax.xml.validation.SecuritySupport', + 'javax.xml.validation.TypeInfoProvider', + 'javax.xml.validation.Validator', + 'javax.xml.validation.ValidatorHandler', + 'javax.xml.xpath.SecuritySupport$1', + 'javax.xml.xpath.SecuritySupport$2', + 'javax.xml.xpath.SecuritySupport$3', + 'javax.xml.xpath.SecuritySupport$4', + 'javax.xml.xpath.SecuritySupport$5', + 'javax.xml.xpath.SecuritySupport$6', + 'javax.xml.xpath.SecuritySupport$7', + 'javax.xml.xpath.SecuritySupport$8', + 'javax.xml.xpath.SecuritySupport', + 'javax.xml.xpath.XPath', + 'javax.xml.xpath.XPathConstants', + 'javax.xml.xpath.XPathException', + 'javax.xml.xpath.XPathExpression', + 'javax.xml.xpath.XPathExpressionException', + 'javax.xml.xpath.XPathFactory', + 'javax.xml.xpath.XPathFactoryConfigurationException', + 'javax.xml.xpath.XPathFactoryFinder$1', + 'javax.xml.xpath.XPathFactoryFinder$2', + 'javax.xml.xpath.XPathFactoryFinder', + 'javax.xml.xpath.XPathFunction', + 'javax.xml.xpath.XPathFunctionException', + 'javax.xml.xpath.XPathFunctionResolver', + 'javax.xml.xpath.XPathVariableResolver', + 'org.w3c.dom.Attr', + 'org.w3c.dom.CDATASection', + 'org.w3c.dom.CharacterData', + 'org.w3c.dom.Comment', + 'org.w3c.dom.DOMConfiguration', + 'org.w3c.dom.DOMError', + 'org.w3c.dom.DOMErrorHandler', + 'org.w3c.dom.DOMException', + 'org.w3c.dom.DOMImplementation', + 'org.w3c.dom.DOMImplementationList', + 'org.w3c.dom.DOMImplementationSource', + 'org.w3c.dom.DOMLocator', + 'org.w3c.dom.DOMStringList', + 'org.w3c.dom.Document', + 'org.w3c.dom.DocumentFragment', + 'org.w3c.dom.DocumentType', + 'org.w3c.dom.Element', + 'org.w3c.dom.Entity', + 'org.w3c.dom.EntityReference', + 'org.w3c.dom.NameList', + 'org.w3c.dom.NamedNodeMap', + 'org.w3c.dom.Node', + 'org.w3c.dom.NodeList', + 'org.w3c.dom.Notation', + 'org.w3c.dom.ProcessingInstruction', + 'org.w3c.dom.Text', + 'org.w3c.dom.TypeInfo', + 'org.w3c.dom.UserDataHandler', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$1', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$2', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$3', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$4', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry', + 'org.w3c.dom.css.CSS2Properties', + 'org.w3c.dom.css.CSSCharsetRule', + 'org.w3c.dom.css.CSSFontFaceRule', + 
'org.w3c.dom.css.CSSImportRule', + 'org.w3c.dom.css.CSSMediaRule', + 'org.w3c.dom.css.CSSPageRule', + 'org.w3c.dom.css.CSSPrimitiveValue', + 'org.w3c.dom.css.CSSRule', + 'org.w3c.dom.css.CSSRuleList', + 'org.w3c.dom.css.CSSStyleDeclaration', + 'org.w3c.dom.css.CSSStyleRule', + 'org.w3c.dom.css.CSSStyleSheet', + 'org.w3c.dom.css.CSSUnknownRule', + 'org.w3c.dom.css.CSSValue', + 'org.w3c.dom.css.CSSValueList', + 'org.w3c.dom.css.Counter', + 'org.w3c.dom.css.DOMImplementationCSS', + 'org.w3c.dom.css.DocumentCSS', + 'org.w3c.dom.css.ElementCSSInlineStyle', + 'org.w3c.dom.css.RGBColor', + 'org.w3c.dom.css.Rect', + 'org.w3c.dom.css.ViewCSS', + 'org.w3c.dom.events.DocumentEvent', + 'org.w3c.dom.events.Event', + 'org.w3c.dom.events.EventException', + 'org.w3c.dom.events.EventListener', + 'org.w3c.dom.events.EventTarget', + 'org.w3c.dom.events.MouseEvent', + 'org.w3c.dom.events.MutationEvent', + 'org.w3c.dom.events.UIEvent', + 'org.w3c.dom.html.HTMLAnchorElement', + 'org.w3c.dom.html.HTMLAppletElement', + 'org.w3c.dom.html.HTMLAreaElement', + 'org.w3c.dom.html.HTMLBRElement', + 'org.w3c.dom.html.HTMLBaseElement', + 'org.w3c.dom.html.HTMLBaseFontElement', + 'org.w3c.dom.html.HTMLBodyElement', + 'org.w3c.dom.html.HTMLButtonElement', + 'org.w3c.dom.html.HTMLCollection', + 'org.w3c.dom.html.HTMLDListElement', + 'org.w3c.dom.html.HTMLDOMImplementation', + 'org.w3c.dom.html.HTMLDirectoryElement', + 'org.w3c.dom.html.HTMLDivElement', + 'org.w3c.dom.html.HTMLDocument', + 'org.w3c.dom.html.HTMLElement', + 'org.w3c.dom.html.HTMLFieldSetElement', + 'org.w3c.dom.html.HTMLFontElement', + 'org.w3c.dom.html.HTMLFormElement', + 'org.w3c.dom.html.HTMLFrameElement', + 'org.w3c.dom.html.HTMLFrameSetElement', + 'org.w3c.dom.html.HTMLHRElement', + 'org.w3c.dom.html.HTMLHeadElement', + 'org.w3c.dom.html.HTMLHeadingElement', + 'org.w3c.dom.html.HTMLHtmlElement', + 'org.w3c.dom.html.HTMLIFrameElement', + 'org.w3c.dom.html.HTMLImageElement', + 'org.w3c.dom.html.HTMLInputElement', + 'org.w3c.dom.html.HTMLIsIndexElement', + 'org.w3c.dom.html.HTMLLIElement', + 'org.w3c.dom.html.HTMLLabelElement', + 'org.w3c.dom.html.HTMLLegendElement', + 'org.w3c.dom.html.HTMLLinkElement', + 'org.w3c.dom.html.HTMLMapElement', + 'org.w3c.dom.html.HTMLMenuElement', + 'org.w3c.dom.html.HTMLMetaElement', + 'org.w3c.dom.html.HTMLModElement', + 'org.w3c.dom.html.HTMLOListElement', + 'org.w3c.dom.html.HTMLObjectElement', + 'org.w3c.dom.html.HTMLOptGroupElement', + 'org.w3c.dom.html.HTMLOptionElement', + 'org.w3c.dom.html.HTMLParagraphElement', + 'org.w3c.dom.html.HTMLParamElement', + 'org.w3c.dom.html.HTMLPreElement', + 'org.w3c.dom.html.HTMLQuoteElement', + 'org.w3c.dom.html.HTMLScriptElement', + 'org.w3c.dom.html.HTMLSelectElement', + 'org.w3c.dom.html.HTMLStyleElement', + 'org.w3c.dom.html.HTMLTableCaptionElement', + 'org.w3c.dom.html.HTMLTableCellElement', + 'org.w3c.dom.html.HTMLTableColElement', + 'org.w3c.dom.html.HTMLTableElement', + 'org.w3c.dom.html.HTMLTableRowElement', + 'org.w3c.dom.html.HTMLTableSectionElement', + 'org.w3c.dom.html.HTMLTextAreaElement', + 'org.w3c.dom.html.HTMLTitleElement', + 'org.w3c.dom.html.HTMLUListElement', + 'org.w3c.dom.ls.DOMImplementationLS', + 'org.w3c.dom.ls.LSException', + 'org.w3c.dom.ls.LSInput', + 'org.w3c.dom.ls.LSLoadEvent', + 'org.w3c.dom.ls.LSOutput', + 'org.w3c.dom.ls.LSParser', + 'org.w3c.dom.ls.LSParserFilter', + 'org.w3c.dom.ls.LSProgressEvent', + 'org.w3c.dom.ls.LSResourceResolver', + 'org.w3c.dom.ls.LSSerializer', + 'org.w3c.dom.ls.LSSerializerFilter', + 'org.w3c.dom.ranges.DocumentRange', + 
'org.w3c.dom.ranges.Range', + 'org.w3c.dom.ranges.RangeException', + 'org.w3c.dom.stylesheets.DocumentStyle', + 'org.w3c.dom.stylesheets.LinkStyle', + 'org.w3c.dom.stylesheets.MediaList', + 'org.w3c.dom.stylesheets.StyleSheet', + 'org.w3c.dom.stylesheets.StyleSheetList', + 'org.w3c.dom.traversal.DocumentTraversal', + 'org.w3c.dom.traversal.NodeFilter', + 'org.w3c.dom.traversal.NodeIterator', + 'org.w3c.dom.traversal.TreeWalker', + 'org.w3c.dom.views.AbstractView', + 'org.w3c.dom.views.DocumentView', + 'org.w3c.dom.xpath.XPathEvaluator', + 'org.w3c.dom.xpath.XPathException', + 'org.w3c.dom.xpath.XPathExpression', + 'org.w3c.dom.xpath.XPathNSResolver', + 'org.w3c.dom.xpath.XPathNamespace', + 'org.w3c.dom.xpath.XPathResult', + 'org.xml.sax.AttributeList', + 'org.xml.sax.Attributes', + 'org.xml.sax.ContentHandler', + 'org.xml.sax.DTDHandler', + 'org.xml.sax.DocumentHandler', + 'org.xml.sax.EntityResolver', + 'org.xml.sax.ErrorHandler', + 'org.xml.sax.HandlerBase', + 'org.xml.sax.InputSource', + 'org.xml.sax.Locator', + 'org.xml.sax.Parser', + 'org.xml.sax.SAXException', + 'org.xml.sax.SAXNotRecognizedException', + 'org.xml.sax.SAXNotSupportedException', + 'org.xml.sax.SAXParseException', + 'org.xml.sax.XMLFilter', + 'org.xml.sax.XMLReader', + 'org.xml.sax.ext.Attributes2', + 'org.xml.sax.ext.Attributes2Impl', + 'org.xml.sax.ext.DeclHandler', + 'org.xml.sax.ext.DefaultHandler2', + 'org.xml.sax.ext.EntityResolver2', + 'org.xml.sax.ext.LexicalHandler', + 'org.xml.sax.ext.Locator2', + 'org.xml.sax.ext.Locator2Impl', + 'org.xml.sax.helpers.AttributeListImpl', + 'org.xml.sax.helpers.AttributesImpl', + 'org.xml.sax.helpers.DefaultHandler', + 'org.xml.sax.helpers.LocatorImpl', + 'org.xml.sax.helpers.NamespaceSupport$Context', + 'org.xml.sax.helpers.NamespaceSupport', + 'org.xml.sax.helpers.NewInstance', + 'org.xml.sax.helpers.ParserAdapter$AttributeListAdapter', + 'org.xml.sax.helpers.ParserAdapter', + 'org.xml.sax.helpers.ParserFactory', + 'org.xml.sax.helpers.SecuritySupport$1', + 'org.xml.sax.helpers.SecuritySupport$2', + 'org.xml.sax.helpers.SecuritySupport$3', + 'org.xml.sax.helpers.SecuritySupport$4', + 'org.xml.sax.helpers.SecuritySupport', + 'org.xml.sax.helpers.XMLFilterImpl', + 'org.xml.sax.helpers.XMLReaderAdapter$AttributesAdapter', + 'org.xml.sax.helpers.XMLReaderAdapter', + 'org.xml.sax.helpers.XMLReaderFactory', ] diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index f3c414a0718..58f2dceb740 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -55,7 +55,6 @@ dependencies { compile "org.apache.poi:poi-ooxml-schemas:${versions.poi}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile 'org.apache.xmlbeans:xmlbeans:2.6.0' - compile 'stax:stax-api:1.0.1' // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork @@ -71,7 +70,7 @@ forbiddenPatterns { } // classes are missing, e.g. 
org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ // uses internal java api: com.sun.syndication (SyndFeedInput, SyndFeed, SyndEntry, SyndContent) 'org.apache.tika.parser.feed.FeedParser', diff --git a/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 b/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 deleted file mode 100644 index 4426e34685d..00000000000 --- a/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -49c100caf72d658aca8e58bd74a4ba90fa2b0d70 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/stax-api-LICENSE.txt b/plugins/mapper-attachments/licenses/stax-api-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/plugins/mapper-attachments/licenses/stax-api-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/plugins/mapper-attachments/licenses/stax-api-NOTICE.txt b/plugins/mapper-attachments/licenses/stax-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index c09a3ff4d67..ca444768590 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -203,4 +203,4 @@ integTest { } // classes are missing, e.g. org.mockito.Mockito -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 82797788f8e..90e4dd2d956 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -51,7 +51,7 @@ test { } // classes are missing, e.g. org.apache.log.Logger -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 02ed75fccbb..3782f368af4 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -36,7 +36,7 @@ test { } // classes are missing, com.ibm.icu.lang.UCharacter -thirdPartyAudit.lenient = true +thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ // uses internal java api: sun.misc.Unsafe 'com.google.common.cache.Striped64', diff --git a/test-framework/build.gradle b/test-framework/build.gradle index 2263413bbde..6930abb3d23 100644 --- a/test-framework/build.gradle +++ b/test-framework/build.gradle @@ -48,4 +48,4 @@ forbiddenApisMain { dependencyLicenses.enabled = false // we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!! 
-thirdPartyAudit.lenient = true
+thirdPartyAudit.missingClasses = true

From c1d8d3b28b288c6c2f5b9ada2b3e356e18734a53 Mon Sep 17 00:00:00 2001
From: Michael McCandless
Date: Thu, 17 Dec 2015 05:26:54 -0500
Subject: [PATCH 086/322] AllTermQuery's scorer should skip segments that
 never saw the requested term

---
 .../common/lucene/all/AllTermQuery.java   |  4 +++
 .../common/lucene/all/SimpleAllTests.java | 35 +++++++++++++++++++
 2 files changed, 39 insertions(+)

diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
index 7191c96e33e..4fe90aed9e4 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
@@ -149,6 +149,10 @@ public final class AllTermQuery extends Query {
                 return null;
             }
             final TermState state = termStates.get(context.ord);
+            if (state == null) {
+                // Term does not exist in this segment
+                return null;
+            }
             termsEnum.seekExact(term.bytes(), state);
             PostingsEnum docs = termsEnum.postings(null, PostingsEnum.PAYLOADS);
             assert docs != null;
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
index f4f3034528f..7ee238ae7f2 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
@@ -220,6 +220,41 @@ public class SimpleAllTests extends ESTestCase {
         indexWriter.close();
     }
 
+    public void testTermMissingFromOneSegment() throws Exception {
+        Directory dir = new RAMDirectory();
+        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
+
+        Document doc = new Document();
+        doc.add(new Field("_id", "1", StoredField.TYPE));
+        AllEntries allEntries = new AllEntries();
+        allEntries.addText("field", "something", 2.0f);
+        allEntries.reset();
+        doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+        indexWriter.addDocument(doc);
+        indexWriter.commit();
+
+        doc = new Document();
+        doc.add(new Field("_id", "2", StoredField.TYPE));
+        allEntries = new AllEntries();
+        allEntries.addText("field", "else", 1.0f);
+        allEntries.reset();
+        doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+        indexWriter.addDocument(doc);
+
+        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        assertEquals(2, reader.leaves().size());
+        IndexSearcher searcher = new IndexSearcher(reader);
+
+        // "something" only appears in the first segment:
+        Query query = new AllTermQuery(new Term("_all", "something"));
+        TopDocs docs = searcher.search(query, 10);
+        assertEquals(1, docs.totalHits);
+
+        indexWriter.close();
+    }
+
     public void testMultipleTokensAllNoBoost() throws Exception {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));

From 86d6e28b7f40aec6d50e7af7208fa156df51d90a Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Thu, 17 Dec 2015 13:03:08 +0100
Subject: [PATCH 087/322] MetaDataMappingService should call
 MapperService.merge with the original mapping update.

Currently MetaDataMappingService parses the mapping update, reserializes it,
and finally calls MapperService.merge with the serialized mapping.
Given that mapping serialization only writes differences from the default, this is a bit unfair to parsers since they can't know whether some option has been explicitly set or not. Furthermore this can cause bugs with metadata fields given that these fields use existing field types as defaults. This commit changes MetaDataMappingService to call MapperService.merge with the original mapping update. --- .../metadata/MetaDataMappingService.java | 37 +++--- .../index/mapper/MapperService.java | 109 +++++++++--------- 2 files changed, 72 insertions(+), 74 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index bbaeb5a11d7..8093d93ccce 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -236,8 +236,8 @@ public class MetaDataMappingService extends AbstractComponent { } private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request) throws IOException { - Map newMappers = new HashMap<>(); - Map existingMappers = new HashMap<>(); + String mappingType = request.type(); + CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); for (String index : request.indices()) { IndexService indexService = indicesService.indexServiceSafe(index); // try and parse it (no need to add it here) so we can bail early in case of parsing exception @@ -245,9 +245,9 @@ public class MetaDataMappingService extends AbstractComponent { DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings. 
Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); + newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, false); } else { - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); + newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null); if (existingMapper != null) { // first, simulate // this will just throw exceptions in case of problems @@ -270,36 +270,31 @@ public class MetaDataMappingService extends AbstractComponent { } } } - newMappers.put(index, newMapper); - if (existingMapper != null) { - existingMappers.put(index, existingMapper); + if (mappingType == null) { + mappingType = newMapper.type(); + } else if (mappingType.equals(newMapper.type()) == false) { + throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); } } + assert mappingType != null; - String mappingType = request.type(); - if (mappingType == null) { - mappingType = newMappers.values().iterator().next().type(); - } else if (!mappingType.equals(newMappers.values().iterator().next().type())) { - throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); - } if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); } final Map mappings = new HashMap<>(); - for (Map.Entry entry : newMappers.entrySet()) { - String index = entry.getKey(); + for (String index : request.indices()) { // do the actual merge here on the master, and update the mapping source - DocumentMapper newMapper = entry.getValue(); IndexService indexService = indicesService.indexService(index); if (indexService == null) { continue; } CompressedXContent existingSource = null; - if (existingMappers.containsKey(entry.getKey())) { - existingSource = existingMappers.get(entry.getKey()).mappingSource(); + DocumentMapper existingMapper = indexService.mapperService().documentMapper(mappingType); + if (existingMapper != null) { + existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes()); + DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, true, request.updateAllTypes()); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { @@ -318,9 +313,9 @@ public class MetaDataMappingService extends AbstractComponent { } else { mappings.put(index, new MappingMetaData(mergedMapper)); if (logger.isDebugEnabled()) { - logger.debug("[{}] create_mapping [{}] with source [{}]", index, newMapper.type(), updatedSource); + logger.debug("[{}] create_mapping [{}] with source [{}]", index, mappingType, updatedSource); } else if (logger.isInfoEnabled()) { - logger.info("[{}] create_mapping [{}]", index, newMapper.type()); + logger.info("[{}] create_mapping [{}]", index, mappingType); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index de35b4712ea..37e99e8c90c 100755 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -198,6 +198,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it + // NOTE: never apply the default here DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); // still add it as a document mapper so we have it registered and, for example, persisted back into // the cluster meta data if needed, or checked for existence @@ -211,68 +212,70 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } return mapper; } else { - return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + try (ReleasableLock lock = mappingWriteLock.acquire()) { + // only apply the default mapping if we don't have the type yet + applyDefault &= mappers.containsKey(type) == false; + return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + } } } // never expose this to the outside world, we need to reparse the doc mapper so we get fresh // instances of field mappers to properly remove existing doc mapper private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { - try (ReleasableLock lock = mappingWriteLock.acquire()) { - if (mapper.type().length() == 0) { - throw new InvalidTypeNameException("mapping type name is empty"); - } - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]"); - } - if (mapper.type().charAt(0) == '_') { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'"); - } - if (mapper.type().contains("#")) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it"); - } - if (mapper.type().contains(",")) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it"); - } - if (mapper.type().equals(mapper.parentFieldMapper().type())) { - throw new IllegalArgumentException("The [_parent.type] option can't point to the same type"); - } - if (typeNameStartsWithIllegalDot(mapper)) { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'"); - } else { - logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type()); - } - } - // we can add new field/object mappers while the old ones are there - // since we get new instances of those, and when we remove, we remove - // by instance equality - DocumentMapper oldMapper = mappers.get(mapper.type()); - - if (oldMapper != null) { - oldMapper.merge(mapper.mapping(), false, updateAllTypes); - return oldMapper; + if (mapper.type().length() == 0) { + throw new InvalidTypeNameException("mapping type name is empty"); + } + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + 
"]"); + } + if (mapper.type().charAt(0) == '_') { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'"); + } + if (mapper.type().contains("#")) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it"); + } + if (mapper.type().contains(",")) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it"); + } + if (mapper.type().equals(mapper.parentFieldMapper().type())) { + throw new IllegalArgumentException("The [_parent.type] option can't point to the same type"); + } + if (typeNameStartsWithIllegalDot(mapper)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { + throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'"); } else { - Tuple, Collection> newMappers = checkMappersCompatibility( - mapper.type(), mapper.mapping(), updateAllTypes); - Collection newObjectMappers = newMappers.v1(); - Collection newFieldMappers = newMappers.v2(); - addMappers(mapper.type(), newObjectMappers, newFieldMappers); - - for (DocumentTypeListener typeListener : typeListeners) { - typeListener.beforeCreate(mapper); - } - mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map(); - if (mapper.parentFieldMapper().active()) { - Set newParentTypes = new HashSet<>(parentTypes.size() + 1); - newParentTypes.addAll(parentTypes); - newParentTypes.add(mapper.parentFieldMapper().type()); - parentTypes = unmodifiableSet(newParentTypes); - } - assert assertSerialization(mapper); - return mapper; + logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type()); } } + // we can add new field/object mappers while the old ones are there + // since we get new instances of those, and when we remove, we remove + // by instance equality + DocumentMapper oldMapper = mappers.get(mapper.type()); + + if (oldMapper != null) { + oldMapper.merge(mapper.mapping(), false, updateAllTypes); + return oldMapper; + } else { + Tuple, Collection> newMappers = checkMappersCompatibility( + mapper.type(), mapper.mapping(), updateAllTypes); + Collection newObjectMappers = newMappers.v1(); + Collection newFieldMappers = newMappers.v2(); + addMappers(mapper.type(), newObjectMappers, newFieldMappers); + + for (DocumentTypeListener typeListener : typeListeners) { + typeListener.beforeCreate(mapper); + } + mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map(); + if (mapper.parentFieldMapper().active()) { + Set newParentTypes = new HashSet<>(parentTypes.size() + 1); + newParentTypes.addAll(parentTypes); + newParentTypes.add(mapper.parentFieldMapper().type()); + parentTypes = unmodifiableSet(newParentTypes); + } + assert assertSerialization(mapper); + return mapper; + } } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { From b2b7b30d10f03d6df342c7fdb36e32047907ccc4 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 18 Nov 2015 13:00:14 +0100 Subject: [PATCH 088/322] BulkProcessor retries after request handling has been rejected due to a full thread pool With this commit we introduce limited retries with a backoff logic to BulkProcessor when a bulk request has been rejeced with an EsRejectedExecutionException. Fixes #14620. 
--- .../action/bulk/BackoffPolicy.java | 203 +++++++++++++++ .../action/bulk/BulkProcessor.java | 101 ++------ .../action/bulk/BulkRequestHandler.java | 160 ++++++++++++ .../org/elasticsearch/action/bulk/Retry.java | 237 ++++++++++++++++++ .../action/bulk/BulkProcessorRetryIT.java | 164 ++++++++++++ .../elasticsearch/action/bulk/RetryTests.java | 201 +++++++++++++++ 6 files changed, 988 insertions(+), 78 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java create mode 100644 core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java create mode 100644 core/src/main/java/org/elasticsearch/action/bulk/Retry.java create mode 100644 core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java create mode 100644 core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java b/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java new file mode 100644 index 00000000000..a0ccca0fb5c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java @@ -0,0 +1,203 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * Provides a backoff policy for bulk requests. Whenever a bulk request is rejected due to resource constraints (i.e. the client's internal + * thread pool is full), the backoff policy decides how long the bulk processor will wait before the operation is retried internally. + * + * Notes for implementing custom subclasses: + * + * The underlying mathematical principle of BackoffPolicy are progressions which can be either finite or infinite although + * the latter should not be used for retrying. A progression can be mapped to a java.util.Iterator with the following + * semantics: + * + *
    + *
  • #hasNext() determines whether the progression has more elements. Return true for infinite progressions
  • + *
  • #next() determines the next element in the progression, i.e. the next wait time period
  • + *
+ * + * Note that backoff policies are exposed as Iterables in order to be consumed multiple times. + */ +public abstract class BackoffPolicy implements Iterable { + private static final BackoffPolicy NO_BACKOFF = new NoBackoff(); + + /** + * Creates a backoff policy that will not allow any backoff, i.e. an operation will fail after the first attempt. + * + * @return A backoff policy without any backoff period. The returned instance is thread safe. + */ + public static BackoffPolicy noBackoff() { + return NO_BACKOFF; + } + + /** + * Creates an new constant backoff policy with the provided configuration. + * + * @param delay The delay defines how long to wait between retry attempts. Must not be null. + * Must be <= Integer.MAX_VALUE ms. + * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number. + * @return A backoff policy with a constant wait time between retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread. + */ + public static BackoffPolicy constantBackoff(TimeValue delay, int maxNumberOfRetries) { + return new ConstantBackoff(checkDelay(delay), maxNumberOfRetries); + } + + /** + * Creates an new exponential backoff policy with a default configuration of 50 ms initial wait period and 8 retries taking + * roughly 5.1 seconds in total. + * + * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread. + */ + public static BackoffPolicy exponentialBackoff() { + return exponentialBackoff(TimeValue.timeValueMillis(50), 8); + } + + /** + * Creates an new exponential backoff policy with the provided configuration. + * + * @param initialDelay The initial delay defines how long to wait for the first retry attempt. Must not be null. + * Must be <= Integer.MAX_VALUE ms. + * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number. + * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread. 
+ */ + public static BackoffPolicy exponentialBackoff(TimeValue initialDelay, int maxNumberOfRetries) { + return new ExponentialBackoff((int) checkDelay(initialDelay).millis(), maxNumberOfRetries); + } + + private static TimeValue checkDelay(TimeValue delay) { + if (delay.millis() > Integer.MAX_VALUE) { + throw new IllegalArgumentException("delay must be <= " + Integer.MAX_VALUE + " ms"); + } + return delay; + } + + private static class NoBackoff extends BackoffPolicy { + @Override + public Iterator iterator() { + return new Iterator() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public TimeValue next() { + throw new NoSuchElementException("No backoff"); + } + }; + } + } + + private static class ExponentialBackoff extends BackoffPolicy { + private final int start; + + private final int numberOfElements; + + private ExponentialBackoff(int start, int numberOfElements) { + assert start >= 0; + assert numberOfElements >= 0; + this.start = start; + this.numberOfElements = numberOfElements; + } + + @Override + public Iterator iterator() { + return new ExponentialBackoffIterator(start, numberOfElements); + } + } + + private static class ExponentialBackoffIterator implements Iterator { + private final int numberOfElements; + + private final int start; + + private int currentlyConsumed; + + private ExponentialBackoffIterator(int start, int numberOfElements) { + this.start = start; + this.numberOfElements = numberOfElements; + } + + @Override + public boolean hasNext() { + return currentlyConsumed < numberOfElements; + } + + @Override + public TimeValue next() { + if (!hasNext()) { + throw new NoSuchElementException("Only up to " + numberOfElements + " elements"); + } + int result = start + 10 * ((int) Math.exp(0.8d * (currentlyConsumed)) - 1); + currentlyConsumed++; + return TimeValue.timeValueMillis(result); + } + } + + private static final class ConstantBackoff extends BackoffPolicy { + private final TimeValue delay; + + private final int numberOfElements; + + public ConstantBackoff(TimeValue delay, int numberOfElements) { + assert numberOfElements >= 0; + this.delay = delay; + this.numberOfElements = numberOfElements; + } + + @Override + public Iterator iterator() { + return new ConstantBackoffIterator(delay, numberOfElements); + } + } + + private static final class ConstantBackoffIterator implements Iterator { + private final TimeValue delay; + private final int numberOfElements; + private int curr; + + public ConstantBackoffIterator(TimeValue delay, int numberOfElements) { + this.delay = delay; + this.numberOfElements = numberOfElements; + } + + @Override + public boolean hasNext() { + return curr < numberOfElements; + } + + @Override + public TimeValue next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + curr++; + return delay; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 2a7c185ad8a..316ec7a548e 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -48,7 +47,7 @@ public class BulkProcessor implements Closeable { /** * A listener for the execution. 
*/ - public static interface Listener { + public interface Listener { /** * Callback before the bulk is executed. @@ -79,6 +78,7 @@ public class BulkProcessor implements Closeable { private int bulkActions = 1000; private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB); private TimeValue flushInterval = null; + private BackoffPolicy backoffPolicy = BackoffPolicy.noBackoff(); /** * Creates a builder of bulk processor with the client to use and the listener that will be used @@ -136,11 +136,25 @@ public class BulkProcessor implements Closeable { return this; } + /** + * Sets a custom backoff policy. The backoff policy defines how the bulk processor should handle retries of bulk requests internally + * in case they have failed due to resource constraints (i.e. a thread pool was full). + * + * The default is to not back off, i.e. failing immediately. + */ + public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) { + if (backoffPolicy == null) { + throw new NullPointerException("'backoffPolicy' must not be null. To disable backoff, pass BackoffPolicy.noBackoff()"); + } + this.backoffPolicy = backoffPolicy; + return this; + } + /** * Builds a new bulk processor. */ public BulkProcessor build() { - return new BulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval); + return new BulkProcessor(client, backoffPolicy, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval); } } @@ -152,38 +166,27 @@ public class BulkProcessor implements Closeable { return new Builder(client, listener); } - private final Client client; - private final Listener listener; - - private final String name; - - private final int concurrentRequests; private final int bulkActions; private final long bulkSize; - private final TimeValue flushInterval; - private final Semaphore semaphore; + private final ScheduledThreadPoolExecutor scheduler; private final ScheduledFuture scheduledFuture; private final AtomicLong executionIdGen = new AtomicLong(); private BulkRequest bulkRequest; + private final BulkRequestHandler bulkRequestHandler; private volatile boolean closed = false; - BulkProcessor(Client client, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) { - this.client = client; - this.listener = listener; - this.name = name; - this.concurrentRequests = concurrentRequests; + BulkProcessor(Client client, BackoffPolicy backoffPolicy, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) { this.bulkActions = bulkActions; this.bulkSize = bulkSize.bytes(); - this.semaphore = new Semaphore(concurrentRequests); this.bulkRequest = new BulkRequest(); + this.bulkRequestHandler = (concurrentRequests == 0) ? BulkRequestHandler.syncHandler(client, backoffPolicy, listener) : BulkRequestHandler.asyncHandler(client, backoffPolicy, listener, concurrentRequests); - this.flushInterval = flushInterval; if (flushInterval != null) { this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, EsExecutors.daemonThreadFactory(client.settings(), (name != null ? 
"[" + name + "]" : "") + "bulk_processor")); this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); @@ -231,14 +234,7 @@ public class BulkProcessor implements Closeable { if (bulkRequest.numberOfActions() > 0) { execute(); } - if (this.concurrentRequests < 1) { - return true; - } - if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) { - semaphore.release(this.concurrentRequests); - return true; - } - return false; + return this.bulkRequestHandler.awaitClose(timeout, unit); } /** @@ -308,58 +304,7 @@ public class BulkProcessor implements Closeable { final long executionId = executionIdGen.incrementAndGet(); this.bulkRequest = new BulkRequest(); - - if (concurrentRequests == 0) { - // execute in a blocking fashion... - boolean afterCalled = false; - try { - listener.beforeBulk(executionId, bulkRequest); - BulkResponse bulkItemResponses = client.bulk(bulkRequest).actionGet(); - afterCalled = true; - listener.afterBulk(executionId, bulkRequest, bulkItemResponses); - } catch (Exception e) { - if (!afterCalled) { - listener.afterBulk(executionId, bulkRequest, e); - } - } - } else { - boolean success = false; - boolean acquired = false; - try { - listener.beforeBulk(executionId, bulkRequest); - semaphore.acquire(); - acquired = true; - client.bulk(bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse response) { - try { - listener.afterBulk(executionId, bulkRequest, response); - } finally { - semaphore.release(); - } - } - - @Override - public void onFailure(Throwable e) { - try { - listener.afterBulk(executionId, bulkRequest, e); - } finally { - semaphore.release(); - } - } - }); - success = true; - } catch (InterruptedException e) { - Thread.interrupted(); - listener.afterBulk(executionId, bulkRequest, e); - } catch (Throwable t) { - listener.afterBulk(executionId, bulkRequest, t); - } finally { - if (!success && acquired) { // if we fail on client.bulk() release the semaphore - semaphore.release(); - } - } - } + this.bulkRequestHandler.execute(bulkRequest, executionId); } private boolean isOverTheLimit() { diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java new file mode 100644 index 00000000000..ffc985bd510 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; + +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +/** + * Abstracts the low-level details of bulk request handling + */ +abstract class BulkRequestHandler { + protected final ESLogger logger; + protected final Client client; + + protected BulkRequestHandler(Client client) { + this.client = client; + this.logger = Loggers.getLogger(getClass(), client.settings()); + } + + + public abstract void execute(BulkRequest bulkRequest, long executionId); + + public abstract boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException; + + + public static BulkRequestHandler syncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { + return new SyncBulkRequestHandler(client, backoffPolicy, listener); + } + + public static BulkRequestHandler asyncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { + return new AsyncBulkRequestHandler(client, backoffPolicy, listener, concurrentRequests); + } + + private static class SyncBulkRequestHandler extends BulkRequestHandler { + private final BulkProcessor.Listener listener; + private final BackoffPolicy backoffPolicy; + + public SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { + super(client); + this.backoffPolicy = backoffPolicy; + this.listener = listener; + } + + @Override + public void execute(BulkRequest bulkRequest, long executionId) { + boolean afterCalled = false; + try { + listener.beforeBulk(executionId, bulkRequest); + BulkResponse bulkResponse = Retry + .on(EsRejectedExecutionException.class) + .policy(backoffPolicy) + .withSyncBackoff(client, bulkRequest); + afterCalled = true; + listener.afterBulk(executionId, bulkRequest, bulkResponse); + } catch (Exception e) { + if (!afterCalled) { + logger.warn("Failed to executed bulk request {}.", e, executionId); + listener.afterBulk(executionId, bulkRequest, e); + } + } + } + + @Override + public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException { + // we are "closed" immediately as there is no request in flight + return true; + } + } + + private static class AsyncBulkRequestHandler extends BulkRequestHandler { + private final BackoffPolicy backoffPolicy; + private final BulkProcessor.Listener listener; + private final Semaphore semaphore; + private final int concurrentRequests; + + private AsyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { + super(client); + this.backoffPolicy = backoffPolicy; + assert concurrentRequests > 0; + this.listener = listener; + this.concurrentRequests = concurrentRequests; + this.semaphore = new Semaphore(concurrentRequests); + } + + @Override + public void execute(BulkRequest bulkRequest, long executionId) { + boolean bulkRequestSetupSuccessful = false; + boolean acquired = false; + try { + listener.beforeBulk(executionId, bulkRequest); + semaphore.acquire(); + acquired = true; + Retry.on(EsRejectedExecutionException.class) + .policy(backoffPolicy) + .withAsyncBackoff(client, bulkRequest, new ActionListener() { + @Override + public void onResponse(BulkResponse response) { + try 
{ + listener.afterBulk(executionId, bulkRequest, response); + } finally { + semaphore.release(); + } + } + + @Override + public void onFailure(Throwable e) { + try { + listener.afterBulk(executionId, bulkRequest, e); + } finally { + semaphore.release(); + } + } + }); + bulkRequestSetupSuccessful = true; + } catch (InterruptedException e) { + // This is intentionally wrong to avoid changing the behaviour implicitly with this PR. It will be fixed in #14833 + Thread.interrupted(); + listener.afterBulk(executionId, bulkRequest, e); + } catch (Throwable t) { + logger.warn("Failed to executed bulk request {}.", t, executionId); + listener.afterBulk(executionId, bulkRequest, t); + } finally { + if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore + semaphore.release(); + } + } + } + + @Override + public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException { + if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) { + semaphore.release(this.concurrentRequests); + return true; + } + return false; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java new file mode 100644 index 00000000000..477e61045ba --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -0,0 +1,237 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.*; +import java.util.function.Predicate; + +/** + * Encapsulates synchronous and asynchronous retry logic. + */ +class Retry { + private final Class retryOnThrowable; + + private BackoffPolicy backoffPolicy; + + public static Retry on(Class retryOnThrowable) { + return new Retry(retryOnThrowable); + } + + /** + * @param backoffPolicy The backoff policy that defines how long and how often to wait for retries. + */ + public Retry policy(BackoffPolicy backoffPolicy) { + this.backoffPolicy = backoffPolicy; + return this; + } + + Retry(Class retryOnThrowable) { + this.retryOnThrowable = retryOnThrowable; + } + + /** + * Invokes #bulk(BulkRequest, ActionListener) on the provided client. 
Backs off on the provided exception and delegates results to the + * provided listener. + * + * @param client Client invoking the bulk request. + * @param bulkRequest The bulk request that should be executed. + * @param listener A listener that is invoked when the bulk request finishes or completes with an exception. The listener is not + */ + public void withAsyncBackoff(Client client, BulkRequest bulkRequest, ActionListener listener) { + AsyncRetryHandler r = new AsyncRetryHandler(retryOnThrowable, backoffPolicy, client, listener); + r.execute(bulkRequest); + + } + + /** + * Invokes #bulk(BulkRequest) on the provided client. Backs off on the provided exception. + * + * @param client Client invoking the bulk request. + * @param bulkRequest The bulk request that should be executed. + * @return the bulk response as returned by the client. + * @throws Exception Any exception thrown by the callable. + */ + public BulkResponse withSyncBackoff(Client client, BulkRequest bulkRequest) throws Exception { + return SyncRetryHandler + .create(retryOnThrowable, backoffPolicy, client) + .executeBlocking(bulkRequest) + .actionGet(); + } + + static class AbstractRetryHandler implements ActionListener { + private final ESLogger logger; + private final Client client; + private final ActionListener listener; + private final Iterator backoff; + private final Class retryOnThrowable; + // Access only when holding a client-side lock, see also #addResponses() + private final List responses = new ArrayList<>(); + private final long startTimestampNanos; + // needed to construct the next bulk request based on the response to the previous one + // volatile as we're called from a scheduled thread + private volatile BulkRequest currentBulkRequest; + private volatile ScheduledFuture scheduledRequestFuture; + + public AbstractRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { + this.retryOnThrowable = retryOnThrowable; + this.backoff = backoffPolicy.iterator(); + this.client = client; + this.listener = listener; + this.logger = Loggers.getLogger(getClass(), client.settings()); + // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood + this.startTimestampNanos = System.nanoTime(); + } + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + if (!bulkItemResponses.hasFailures()) { + // we're done here, include all responses + addResponses(bulkItemResponses, (r -> true)); + finishHim(); + } else { + if (canRetry(bulkItemResponses)) { + addResponses(bulkItemResponses, (r -> !r.isFailed())); + retry(createBulkRequestForRetry(bulkItemResponses)); + } else { + addResponses(bulkItemResponses, (r -> true)); + finishHim(); + } + } + } + + @Override + public void onFailure(Throwable e) { + try { + listener.onFailure(e); + } finally { + FutureUtils.cancel(scheduledRequestFuture); + } + } + + private void retry(BulkRequest bulkRequestForRetry) { + assert backoff.hasNext(); + TimeValue next = backoff.next(); + logger.trace("Retry of bulk request scheduled in {} ms.", next.millis()); + scheduledRequestFuture = client.threadPool().schedule(next, ThreadPool.Names.SAME, (() -> this.execute(bulkRequestForRetry))); + } + + private BulkRequest createBulkRequestForRetry(BulkResponse bulkItemResponses) { + BulkRequest requestToReissue = new BulkRequest(); + int index = 0; + for (BulkItemResponse bulkItemResponse : bulkItemResponses.getItems()) { + if (bulkItemResponse.isFailed()) { + 
requestToReissue.add(currentBulkRequest.requests().get(index)); + } + index++; + } + return requestToReissue; + } + + private boolean canRetry(BulkResponse bulkItemResponses) { + if (!backoff.hasNext()) { + return false; + } + for (BulkItemResponse bulkItemResponse : bulkItemResponses) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + Throwable rootCause = ExceptionsHelper.unwrapCause(cause); + if (!rootCause.getClass().equals(retryOnThrowable)) { + return false; + } + } + } + return true; + } + + private void finishHim() { + try { + listener.onResponse(getAccumulatedResponse()); + } finally { + FutureUtils.cancel(scheduledRequestFuture); + } + } + + private void addResponses(BulkResponse response, Predicate filter) { + for (BulkItemResponse bulkItemResponse : response) { + if (filter.test(bulkItemResponse)) { + // Use client-side lock here to avoid visibility issues. This method may be called multiple times + // (based on how many retries we have to issue) and relying that the response handling code will be + // scheduled on the same thread is fragile. + synchronized (responses) { + responses.add(bulkItemResponse); + } + } + } + } + + private BulkResponse getAccumulatedResponse() { + BulkItemResponse[] itemResponses; + synchronized (responses) { + itemResponses = responses.toArray(new BulkItemResponse[1]); + } + long stopTimestamp = System.nanoTime(); + long totalLatencyMs = TimeValue.timeValueNanos(stopTimestamp - startTimestampNanos).millis(); + return new BulkResponse(itemResponses, totalLatencyMs); + } + + public void execute(BulkRequest bulkRequest) { + this.currentBulkRequest = bulkRequest; + client.bulk(bulkRequest, this); + } + } + + static class AsyncRetryHandler extends AbstractRetryHandler { + public AsyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { + super(retryOnThrowable, backoffPolicy, client, listener); + } + } + + static class SyncRetryHandler extends AbstractRetryHandler { + private final PlainActionFuture actionFuture; + + public static SyncRetryHandler create(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client) { + PlainActionFuture actionFuture = PlainActionFuture.newFuture(); + return new SyncRetryHandler(retryOnThrowable, backoffPolicy, client, actionFuture); + } + + public SyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture actionFuture) { + super(retryOnThrowable, backoffPolicy, client, actionFuture); + this.actionFuture = actionFuture; + } + + public ActionFuture executeBlocking(BulkRequest bulkRequest) { + super.execute(bulkRequest); + return actionFuture; + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java new file mode 100644 index 00000000000..3c38e2ef0fa --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -0,0 +1,164 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.Matcher; + +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 2) +public class BulkProcessorRetryIT extends ESIntegTestCase { + private static final String INDEX_NAME = "test"; + private static final String TYPE_NAME = "type"; + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + //Have very low pool and queue sizes to overwhelm internal pools easily + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("threadpool.generic.size", 1) + .put("threadpool.generic.queue_size", 1) + // don't mess with this one! 
It's quite sensitive to a low queue size + // (see also ThreadedActionListener which is happily spawning threads even when we already got rejected) + //.put("threadpool.listener.queue_size", 1) + .put("threadpool.get.queue_size", 1) + // default is 50 + .put("threadpool.bulk.queue_size", 20) + .build(); + } + + + public void testBulkRejectionLoadWithoutBackoff() throws Throwable { + boolean rejectedExecutionExpected = true; + executeBulkRejectionLoad(BackoffPolicy.noBackoff(), rejectedExecutionExpected); + } + + public void testBulkRejectionLoadWithBackoff() throws Throwable { + boolean rejectedExecutionExpected = false; + executeBulkRejectionLoad(BackoffPolicy.exponentialBackoff(), rejectedExecutionExpected); + } + + private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejectedExecutionExpected) throws Throwable { + int numberOfAsyncOps = randomIntBetween(600, 700); + final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); + final Set responses = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + assertAcked(prepareCreate(INDEX_NAME)); + ensureGreen(); + + BulkProcessor bulkProcessor = BulkProcessor.builder(client(), new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + // no op + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + responses.add(response); + latch.countDown(); + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + responses.add(failure); + latch.countDown(); + } + }).setBulkActions(1) + // zero means that we're in the sync case, more means that we're in the async case + .setConcurrentRequests(randomIntBetween(0, 100)) + .setBackoffPolicy(backoffPolicy) + .build(); + indexDocs(bulkProcessor, numberOfAsyncOps); + latch.await(10, TimeUnit.SECONDS); + bulkProcessor.close(); + + assertThat(responses.size(), equalTo(numberOfAsyncOps)); + + // validate all responses + for (Object response : responses) { + if (response instanceof BulkResponse) { + BulkResponse bulkResponse = (BulkResponse) response; + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + BulkItemResponse.Failure failure = bulkItemResponse.getFailure(); + Throwable rootCause = ExceptionsHelper.unwrapCause(failure.getCause()); + if (rootCause instanceof EsRejectedExecutionException) { + if (rejectedExecutionExpected == false) { + // we're not expecting that we overwhelmed it even once + throw new AssertionError("Unexpected failure reason", rootCause); + } + } else { + throw new AssertionError("Unexpected failure", rootCause); + } + } + } + } else { + Throwable t = (Throwable) response; + // we're not expecting any other errors + throw new AssertionError("Unexpected failure", t); + } + } + + client().admin().indices().refresh(new RefreshRequest()).get(); + + // validate we did not create any duplicates due to retries + Matcher searchResultCount; + if (rejectedExecutionExpected) { + // it is ok if we lost some index operations to rejected executions + searchResultCount = lessThanOrEqualTo((long) numberOfAsyncOps); + } else { + searchResultCount = equalTo((long) numberOfAsyncOps); + } + + SearchResponse results = client() + .prepareSearch(INDEX_NAME) + .setTypes(TYPE_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setSize(0) + .get(); + assertThat(results.getHits().totalHits(), searchResultCount); + } + + private static void indexDocs(BulkProcessor processor, int 
numDocs) { + for (int i = 1; i <= numDocs; i++) { + processor.add(client() + .prepareIndex() + .setIndex(INDEX_NAME) + .setType(TYPE_NAME) + .setId(Integer.toString(i)) + .setSource("field", randomRealisticUnicodeOfLengthBetween(1, 30)) + .request()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java new file mode 100644 index 00000000000..e974a6220ed --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -0,0 +1,201 @@ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.util.concurrent.CountDownLatch; + +import static org.hamcrest.Matchers.*; + +public class RetryTests extends ESTestCase { + // no need to wait fof a long time in tests + private static final TimeValue DELAY = TimeValue.timeValueMillis(1L); + private static final int CALLS_TO_FAIL = 5; + + private MockBulkClient bulkClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.bulkClient = new MockBulkClient(getTestName(), CALLS_TO_FAIL); + } + + @Override + @After + public void tearDown() throws Exception { + super.tearDown(); + this.bulkClient.close(); + } + + private BulkRequest createBulkRequest() { + BulkRequest request = new BulkRequest(); + request.add(new UpdateRequest("shop", "products", "1")); + request.add(new UpdateRequest("shop", "products", "2")); + request.add(new UpdateRequest("shop", "products", "3")); + request.add(new UpdateRequest("shop", "products", "4")); + request.add(new UpdateRequest("shop", "products", "5")); + return request; + } + + public void testSyncRetryBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); + + BulkRequest bulkRequest = createBulkRequest(); + BulkResponse response = Retry + .on(EsRejectedExecutionException.class) + .policy(backoff) + .withSyncBackoff(bulkClient, bulkRequest); + + assertFalse(response.hasFailures()); + assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); + } + + public void testSyncRetryFailsAfterBackoff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); + + BulkRequest bulkRequest = createBulkRequest(); + BulkResponse response = Retry + .on(EsRejectedExecutionException.class) + .policy(backoff) + .withSyncBackoff(bulkClient, bulkRequest); + + assertTrue(response.hasFailures()); + assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); + } + + public void testAsyncRetryBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); + AssertingListener listener = new AssertingListener(); + + BulkRequest bulkRequest = createBulkRequest(); + Retry.on(EsRejectedExecutionException.class) + .policy(backoff) + .withAsyncBackoff(bulkClient, bulkRequest, listener); + + listener.awaitCallbacksCalled(); + listener.assertOnResponseCalled(); + 
listener.assertResponseWithoutFailures(); + listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions()); + listener.assertOnFailureNeverCalled(); + } + + public void testAsyncRetryFailsAfterBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); + AssertingListener listener = new AssertingListener(); + + BulkRequest bulkRequest = createBulkRequest(); + Retry.on(EsRejectedExecutionException.class) + .policy(backoff) + .withAsyncBackoff(bulkClient, bulkRequest, listener); + + listener.awaitCallbacksCalled(); + + listener.assertOnResponseCalled(); + listener.assertResponseWithFailures(); + listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions()); + listener.assertOnFailureNeverCalled(); + } + + private static class AssertingListener implements ActionListener { + private final CountDownLatch latch; + private int countOnResponseCalled = 0; + private Throwable lastFailure; + private BulkResponse response; + + private AssertingListener() { + latch = new CountDownLatch(1); + } + + public void awaitCallbacksCalled() throws InterruptedException { + latch.await(); + } + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + latch.countDown(); + this.response = bulkItemResponses; + countOnResponseCalled++; + } + + @Override + public void onFailure(Throwable e) { + latch.countDown(); + this.lastFailure = e; + } + + public void assertOnResponseCalled() { + assertThat(countOnResponseCalled, equalTo(1)); + } + + public void assertResponseWithNumberOfItems(int numItems) { + assertThat(response.getItems().length, equalTo(numItems)); + } + + public void assertResponseWithoutFailures() { + assertThat(response, notNullValue()); + assertFalse("Response should not have failures", response.hasFailures()); + } + + public void assertResponseWithFailures() { + assertThat(response, notNullValue()); + assertTrue("Response should have failures", response.hasFailures()); + } + + public void assertOnFailureNeverCalled() { + assertThat(lastFailure, nullValue()); + } + } + + private static class MockBulkClient extends NoOpClient { + private int numberOfCallsToFail; + + private MockBulkClient(String testName, int numberOfCallsToFail) { + super(testName); + this.numberOfCallsToFail = numberOfCallsToFail; + } + + @Override + public ActionFuture bulk(BulkRequest request) { + PlainActionFuture responseFuture = new PlainActionFuture<>(); + bulk(request, responseFuture); + return responseFuture; + } + + @Override + public void bulk(BulkRequest request, ActionListener listener) { + // do everything synchronously, that's fine for a test + boolean shouldFail = numberOfCallsToFail > 0; + numberOfCallsToFail--; + + BulkItemResponse[] itemResponses = new BulkItemResponse[request.requests().size()]; + // if we have to fail, we need to fail at least once "reliably", the rest can be random + int itemToFail = randomInt(request.requests().size() - 1); + for (int idx = 0; idx < request.requests().size(); idx++) { + if (shouldFail && (randomBoolean() || idx == itemToFail)) { + itemResponses[idx] = failedResponse(); + } else { + itemResponses[idx] = successfulResponse(); + } + } + listener.onResponse(new BulkResponse(itemResponses, 1000L)); + } + + private BulkItemResponse successfulResponse() { + return new BulkItemResponse(1, "update", new DeleteResponse()); + } + + private BulkItemResponse failedResponse() { + return new BulkItemResponse(1, "update", new BulkItemResponse.Failure("test", "test", "1", new EsRejectedExecutionException("pool 
full"))); + } + } +} From 028b206a1117b79a16c6e85fdcbf5652f2848fff Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 17 Dec 2015 08:24:52 -0500 Subject: [PATCH 089/322] Simpler using compressed oops flag representation This commit modifies the internal representation of the JVM flag UseCompressedOops to just be a String. This means we can just store the value of the flag or "unknown" directly so that we do not have to engage in shenanigans with three-valued logic around a boxed boolean. Relates #15489 --- .../elasticsearch/env/NodeEnvironment.java | 8 ++--- .../elasticsearch/monitor/jvm/JvmInfo.java | 34 +++++++++---------- 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 86b6b704a72..93e95dfaa96 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -296,10 +296,10 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } private void maybeLogHeapDetails() { - ByteSizeValue maxHeapSize = JvmInfo.jvmInfo().getMem().getHeapMax(); - Boolean usingCompressedOops = JvmInfo.jvmInfo().usingCompressedOops(); - String usingCompressedOopsStatus = usingCompressedOops == null ? "unknown" : Boolean.toString(usingCompressedOops); - logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, usingCompressedOopsStatus); + JvmInfo jvmInfo = JvmInfo.jvmInfo(); + ByteSizeValue maxHeapSize = jvmInfo.getMem().getHeapMax(); + String useCompressedOops = jvmInfo.useCompressedOops(); + logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops); } private static String toString(Collection items) { diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index 8e1fb9ba7db..22f0ed912d0 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -116,11 +116,10 @@ public class JvmInfo implements Streamable, ToXContent { Method vmOptionMethod = clazz.getMethod("getVMOption", String.class); Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops"); Method valueMethod = vmOptionClazz.getMethod("getValue"); - String value = (String)valueMethod.invoke(useCompressedOopsVmOption); - info.usingCompressedOops = Boolean.parseBoolean(value); + info.useCompressedOops = (String)valueMethod.invoke(useCompressedOopsVmOption); } catch (Throwable t) { // unable to deduce the state of compressed oops - // usingCompressedOops will hold its default value of null + info.useCompressedOops = "unknown"; } INSTANCE = info; @@ -157,7 +156,7 @@ public class JvmInfo implements Streamable, ToXContent { String[] gcCollectors = Strings.EMPTY_ARRAY; String[] memoryPools = Strings.EMPTY_ARRAY; - private Boolean usingCompressedOops; + private String useCompressedOops; private JvmInfo() { } @@ -282,8 +281,16 @@ public class JvmInfo implements Streamable, ToXContent { return this.systemProperties; } - public Boolean usingCompressedOops() { - return this.usingCompressedOops; + /** + * The value of the JVM flag UseCompressedOops, if available otherwise + * "unknown". The value "unknown" indicates that an attempt was + * made to obtain the value of the flag on this JVM and the attempt + * failed. 
+ * + * @return the value of the JVM flag UseCompressedOops or "unknown" + */ + public String useCompressedOops() { + return this.useCompressedOops; } @Override @@ -307,7 +314,7 @@ public class JvmInfo implements Streamable, ToXContent { builder.field(Fields.GC_COLLECTORS, gcCollectors); builder.field(Fields.MEMORY_POOLS, memoryPools); - builder.field(Fields.USING_COMPRESSED_OOPS, usingCompressedOops == null ? "unknown" : Boolean.toString(usingCompressedOops)); + builder.field(Fields.USING_COMPRESSED_OOPS, useCompressedOops); builder.endObject(); return builder; @@ -368,11 +375,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.readFrom(in); gcCollectors = in.readStringArray(); memoryPools = in.readStringArray(); - if (in.readBoolean()) { - usingCompressedOops = in.readBoolean(); - } else { - usingCompressedOops = null; - } + useCompressedOops = in.readOptionalString(); } @Override @@ -397,12 +400,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.writeTo(out); out.writeStringArray(gcCollectors); out.writeStringArray(memoryPools); - if (usingCompressedOops != null) { - out.writeBoolean(true); - out.writeBoolean(usingCompressedOops); - } else { - out.writeBoolean(false); - } + out.writeOptionalString(useCompressedOops); } public static class Mem implements Streamable { From 207ccc3cd69b920def8488a8d1711eecd7f58760 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 17 Dec 2015 14:30:31 +0100 Subject: [PATCH 090/322] Add missing header in RetryTests --- .../elasticsearch/action/bulk/RetryTests.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index e974a6220ed..8f07b58fe6f 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -1,3 +1,21 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.elasticsearch.action.bulk; import org.elasticsearch.action.ActionFuture; From 9149630749ea76d1294f8c3409f57a76a1af6d05 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Dec 2015 18:45:22 -0500 Subject: [PATCH 091/322] Reroute once per batch of shard failures This commit modifies the behavior after publication of a new cluster state to only invoke the reroute logic once per batch of shard failures rather than once per shard failure. 
--- .../action/shard/ShardStateAction.java | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 3a01ced6ebf..a04a6d7bd51 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -148,20 +148,9 @@ public class ShardStateAction extends AbstractComponent { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { try { - int numberOfUnassignedShards = newState.getRoutingNodes().unassigned().size(); - if (oldState != newState && numberOfUnassignedShards > 0) { - String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shard [%s]", numberOfUnassignedShards, request.shardRouting); - if (logger.isTraceEnabled()) { - logger.trace(reason + ", scheduling a reroute"); - } - routingService.reroute(reason); - } - } finally { - try { - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } catch (Throwable channelThrowable) { - logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting); - } + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } catch (Throwable channelThrowable) { + logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting); } } } @@ -189,6 +178,18 @@ public class ShardStateAction extends AbstractComponent { } return batchResultBuilder.build(maybeUpdatedState); } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + int numberOfUnassignedShards = newClusterState.getRoutingNodes().unassigned().size(); + if (numberOfUnassignedShards > 0) { + String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shards", numberOfUnassignedShards); + if (logger.isTraceEnabled()) { + logger.trace(reason + ", scheduling a reroute"); + } + routingService.reroute(reason); + } + } } private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); From 8543d7795e6982b1a4481bb2498b3a358c3e8683 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 17 Dec 2015 15:12:16 +0100 Subject: [PATCH 092/322] Convert several more settings --- .../org/elasticsearch/common/settings/ClusterSettings.java | 4 +++- .../java/org/elasticsearch/common/settings/Setting.java | 5 ++++- .../java/org/elasticsearch/common/settings/Settings.java | 6 +++++- .../main/java/org/elasticsearch/transport/Transport.java | 4 ++++ .../java/org/elasticsearch/transport/TransportService.java | 6 ++++-- .../org/elasticsearch/transport/netty/NettyTransport.java | 2 +- .../elasticsearch/gateway/ReusePeerRecoverySharedTest.java | 4 ++-- 7 files changed, 23 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 4e922ca28e3..ac976268a0c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -36,6 +36,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.search.SearchService; import 
org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import java.util.*; @@ -134,5 +135,6 @@ public final class ClusterSettings extends AbstractScopedSettings { ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, InternalClusterService.CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, - HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING))); + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, + Transport.TRANSPORT_PROFILES_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 42f77a18c9c..236df5c567b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -312,6 +312,9 @@ public class Setting extends ToXContentToBytes { } public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { + return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope); + } + public static Setting> listSetting(String key, Function> defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { Function> parser = (s) -> { try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){ XContentParser.Token token = xContentParser.nextToken(); @@ -330,7 +333,7 @@ public class Setting extends ToXContentToBytes { throw new IllegalArgumentException("failed to parse array", e); } }; - return new Setting>(key, arrayToParsableString(defaultStringValue.toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { + return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override public String getRaw(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index f8dd5d4f1f6..989b05d4bf2 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -652,7 +652,11 @@ public final class Settings implements ToXContent { * Returns group settings for the given setting prefix. 
*/ public Map getAsGroups() throws SettingsException { - return getGroupsInternal("", false); + return getAsGroups(false); + } + + public Map getAsGroups(boolean ignoreNonGrouped) throws SettingsException { + return getGroupsInternal("", ignoreNonGrouped); } /** diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index 10fa9b239dc..8e4b7a47b1b 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -21,6 +21,8 @@ package org.elasticsearch.transport; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -34,6 +36,8 @@ import java.util.Map; public interface Transport extends LifecycleComponent { + Setting TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", false, Setting.Scope.CLUSTER); + public static class TransportSettings { public static final String TRANSPORT_TCP_COMPRESS = "transport.tcp.compress"; } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 916c4863e07..444f52b9c03 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -47,6 +47,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; @@ -84,8 +85,9 @@ public class TransportService extends AbstractLifecycleComponent> TRACE_LOG_INCLUDE_SETTING = Setting.listSetting("transport.tracer.include", Collections.emptyList(), (s) -> s, true, Setting.Scope.CLUSTER); - public static final Setting> TRACE_LOG_EXCLUDE_SETTING = Setting.listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), (s) -> s, true, Setting.Scope.CLUSTER); + public static final Setting> TRACE_LOG_INCLUDE_SETTING = Setting.listSetting("transport.tracer.include", Collections.emptyList(), Function.identity(), true, Setting.Scope.CLUSTER); + public static final Setting> TRACE_LOG_EXCLUDE_SETTING = Setting.listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), Function.identity(), true, Setting.Scope.CLUSTER); + private final ESLogger tracerLog; diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 2f1c52a0ac2..37f1dc4fa0b 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -295,7 +295,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem this.serverOpenChannels = openChannels; // extract default profile first and create standard bootstrap - Map profiles = settings.getGroups("transport.profiles", true); + Map profiles = 
TRANSPORT_PROFILES_SETTING.get(settings()).getAsGroups(true); if (!profiles.containsKey(DEFAULT_PROFILE)) { profiles = new HashMap<>(profiles); profiles.put(DEFAULT_PROFILE, Settings.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java index 5e16fcebe86..936a6fa09a0 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -90,7 +90,7 @@ public class ReusePeerRecoverySharedTest { // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)).get(); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)).get(); logger.info("--> full cluster restart"); restartCluster.run(); @@ -105,7 +105,7 @@ public class ReusePeerRecoverySharedTest { logger.info("--> disabling allocation while the cluster is shut down", useSyncIds ? "" : " a second time"); // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings().setTransientSettings( - settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) .get(); logger.info("--> full cluster restart"); restartCluster.run(); From 3f87b0809eb52de81eaf0a547d28c8de2fe587ce Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 17 Dec 2015 15:22:59 +0100 Subject: [PATCH 093/322] Fix visibility issue in RetryTests --- .../test/java/org/elasticsearch/action/bulk/RetryTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 8f07b58fe6f..5728f7b54bc 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -126,9 +126,9 @@ public class RetryTests extends ESTestCase { private static class AssertingListener implements ActionListener { private final CountDownLatch latch; - private int countOnResponseCalled = 0; - private Throwable lastFailure; - private BulkResponse response; + private volatile int countOnResponseCalled = 0; + private volatile Throwable lastFailure; + private volatile BulkResponse response; private AssertingListener() { latch = new CountDownLatch(1); From b71845bf9b7f16486c5445f5c2d0b362405ecba0 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 17 Dec 2015 15:10:25 +0100 Subject: [PATCH 094/322] Remove the unused mergeScheduleFuture from IndexShard. 
Closes #15512 --- .../elasticsearch/index/shard/IndexShard.java | 34 +++++++++++++++---- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index c0bf9244673..3c0e1c64105 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -19,7 +19,11 @@ package org.elasticsearch.index.shard; -import org.apache.lucene.index.*; +import org.apache.lucene.index.CheckIndex; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; @@ -61,7 +65,16 @@ import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.request.ShardRequestCache; import org.elasticsearch.index.codec.CodecService; -import org.elasticsearch.index.engine.*; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineClosedException; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineException; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.engine.RefreshFailedEngineException; +import org.elasticsearch.index.engine.Segment; +import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.ShardFieldData; @@ -70,7 +83,12 @@ import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.ShardGetService; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.indexing.ShardIndexingService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; @@ -108,7 +126,12 @@ import java.io.IOException; import java.io.PrintStream; import java.nio.channels.ClosedByInterruptException; import java.nio.charset.StandardCharsets; -import java.util.*; +import java.util.Arrays; +import java.util.EnumSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; @@ -151,7 +174,6 @@ public class IndexShard extends AbstractIndexShardComponent { private TimeValue refreshInterval; private volatile ScheduledFuture refreshScheduledFuture; - private volatile ScheduledFuture mergeScheduleFuture; protected volatile ShardRouting shardRouting; 
protected volatile IndexShardState state; protected final AtomicReference currentEngineReference = new AtomicReference<>(); @@ -766,8 +788,6 @@ public class IndexShard extends AbstractIndexShardComponent { if (state != IndexShardState.CLOSED) { FutureUtils.cancel(refreshScheduledFuture); refreshScheduledFuture = null; - FutureUtils.cancel(mergeScheduleFuture); - mergeScheduleFuture = null; } changeState(IndexShardState.CLOSED, reason); indexShardOperationCounter.decRef(); From 3a442db9bd4843bb2b12bad9279ecb35f05315cc Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 7 Dec 2015 15:09:45 +0100 Subject: [PATCH 095/322] Allocate primary shards based on allocation ids Closes #15281 --- .../shards/IndicesShardStoresResponse.java | 55 ++- .../TransportIndicesShardStoresAction.java | 16 +- .../cluster/metadata/IndexMetaData.java | 2 +- .../metadata/MetaDataIndexStateService.java | 9 - .../cluster/routing/ShardRouting.java | 9 +- .../decider/DiskThresholdDecider.java | 5 +- .../decider/EnableAllocationDecider.java | 7 +- .../gateway/PrimaryShardAllocator.java | 196 +++++++--- .../gateway/ReplicaShardAllocator.java | 11 +- ...ransportNodesListGatewayStartedShards.java | 23 +- .../elasticsearch/index/shard/IndexShard.java | 3 +- .../admin/indices/create/CreateIndexIT.java | 7 + .../shards/IndicesShardStoreRequestIT.java | 3 +- .../IndicesShardStoreResponseTests.java | 21 +- .../cluster/routing/PrimaryAllocationIT.java | 103 +++++ .../gateway/PrimaryShardAllocatorTests.java | 358 ++++++++++++------ .../gateway/QuorumGatewayIT.java | 74 +--- .../gateway/ReplicaShardAllocatorTests.java | 18 +- .../index/shard/IndexShardTests.java | 8 +- .../indices/state/SimpleIndexStateIT.java | 11 +- docs/reference/index-modules.asciidoc | 13 - .../indices/shadow-replicas.asciidoc | 5 +- docs/reference/indices/shard-stores.asciidoc | 8 +- docs/reference/migration/migrate_3_0.asciidoc | 21 + .../cluster/shards_allocation.asciidoc | 5 +- .../test/ESAllocationTestCase.java | 4 +- 26 files changed, 645 insertions(+), 350 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java index 84b39d4c689..380f6e00890 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java @@ -56,13 +56,14 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon public static class StoreStatus implements Streamable, ToXContent, Comparable { private DiscoveryNode node; private long version; + private String allocationId; private Throwable storeException; - private Allocation allocation; + private AllocationStatus allocationStatus; /** * The status of the shard store with respect to the cluster */ - public enum Allocation { + public enum AllocationStatus { /** * Allocated as primary @@ -81,16 +82,16 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon private final byte id; - Allocation(byte id) { + AllocationStatus(byte id) { this.id = id; } - private static Allocation fromId(byte id) { + private static AllocationStatus fromId(byte id) { switch (id) { case 0: return PRIMARY; case 1: return REPLICA; case 2: return UNUSED; - default: throw new IllegalArgumentException("unknown 
id for allocation [" + id + "]"); + default: throw new IllegalArgumentException("unknown id for allocation status [" + id + "]"); } } @@ -99,11 +100,11 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon case 0: return "primary"; case 1: return "replica"; case 2: return "unused"; - default: throw new IllegalArgumentException("unknown id for allocation [" + id + "]"); + default: throw new IllegalArgumentException("unknown id for allocation status [" + id + "]"); } } - private static Allocation readFrom(StreamInput in) throws IOException { + private static AllocationStatus readFrom(StreamInput in) throws IOException { return fromId(in.readByte()); } @@ -115,10 +116,11 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon private StoreStatus() { } - public StoreStatus(DiscoveryNode node, long version, Allocation allocation, Throwable storeException) { + public StoreStatus(DiscoveryNode node, long version, String allocationId, AllocationStatus allocationStatus, Throwable storeException) { this.node = node; this.version = version; - this.allocation = allocation; + this.allocationId = allocationId; + this.allocationStatus = allocationStatus; this.storeException = storeException; } @@ -130,13 +132,20 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon } /** - * Version of the store, used to select the store that will be - * used as a primary. + * Version of the store */ public long getVersion() { return version; } + /** + * AllocationStatus id of the store, used to select the store that will be + * used as a primary. + */ + public String getAllocationId() { + return allocationId; + } + /** * Exception while trying to open the * shard index or from when the shard failed @@ -146,13 +155,13 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon } /** - * The allocation status of the store. - * {@link Allocation#PRIMARY} indicates a primary shard copy - * {@link Allocation#REPLICA} indicates a replica shard copy - * {@link Allocation#UNUSED} indicates an unused shard copy + * The allocationStatus status of the store. 
+ * {@link AllocationStatus#PRIMARY} indicates a primary shard copy + * {@link AllocationStatus#REPLICA} indicates a replica shard copy + * {@link AllocationStatus#UNUSED} indicates an unused shard copy */ - public Allocation getAllocation() { - return allocation; + public AllocationStatus getAllocationStatus() { + return allocationStatus; } static StoreStatus readStoreStatus(StreamInput in) throws IOException { @@ -165,7 +174,8 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon public void readFrom(StreamInput in) throws IOException { node = DiscoveryNode.readNode(in); version = in.readLong(); - allocation = Allocation.readFrom(in); + allocationId = in.readOptionalString(); + allocationStatus = AllocationStatus.readFrom(in); if (in.readBoolean()) { storeException = in.readThrowable(); } @@ -175,7 +185,8 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon public void writeTo(StreamOutput out) throws IOException { node.writeTo(out); out.writeLong(version); - allocation.writeTo(out); + out.writeOptionalString(allocationId); + allocationStatus.writeTo(out); if (storeException != null) { out.writeBoolean(true); out.writeThrowable(storeException); @@ -188,7 +199,8 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { node.toXContent(builder, params); builder.field(Fields.VERSION, version); - builder.field(Fields.ALLOCATED, allocation.value()); + builder.field(Fields.ALLOCATION_ID, allocationId); + builder.field(Fields.ALLOCATED, allocationStatus.value()); if (storeException != null) { builder.startObject(Fields.STORE_EXCEPTION); ElasticsearchException.toXContent(builder, params, storeException); @@ -206,7 +218,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon } else { int compare = Long.compare(other.version, version); if (compare == 0) { - return Integer.compare(allocation.id, other.allocation.id); + return Integer.compare(allocationStatus.id, other.allocationStatus.id); } return compare; } @@ -379,6 +391,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon static final XContentBuilderString STORES = new XContentBuilderString("stores"); // StoreStatus fields static final XContentBuilderString VERSION = new XContentBuilderString("version"); + static final XContentBuilderString ALLOCATION_ID = new XContentBuilderString("allocation_id"); static final XContentBuilderString STORE_EXCEPTION = new XContentBuilderString("store_exception"); static final XContentBuilderString ALLOCATED = new XContentBuilderString("allocation"); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 336ebc254b4..d345c0e7d45 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -179,8 +179,8 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc } for (NodeGatewayStartedShards response : fetchResponse.responses) { if (shardExistsInNode(response)) { - IndicesShardStoresResponse.StoreStatus.Allocation allocation = getAllocation(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), 
response.getNode()); - storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), allocation, response.storeException())); + IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), response.getNode()); + storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), response.allocationId(), allocationStatus, response.storeException())); } } CollectionUtil.timSort(storeStatuses); @@ -193,27 +193,27 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc listener.onResponse(new IndicesShardStoresResponse(indicesStoreStatusesBuilder.build(), Collections.unmodifiableList(failureBuilder))); } - private IndicesShardStoresResponse.StoreStatus.Allocation getAllocation(String index, int shardID, DiscoveryNode node) { + private IndicesShardStoresResponse.StoreStatus.AllocationStatus getAllocationStatus(String index, int shardID, DiscoveryNode node) { for (ShardRouting shardRouting : routingNodes.node(node.id())) { ShardId shardId = shardRouting.shardId(); if (shardId.id() == shardID && shardId.getIndex().equals(index)) { if (shardRouting.primary()) { - return IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY; } else if (shardRouting.assignedToNode()) { - return IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA; } else { - return IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED; } } } - return IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED; } /** * A shard exists/existed in a node only if shard state file exists in the node */ private boolean shardExistsInNode(final NodeGatewayStartedShards response) { - return response.storeException() != null || response.version() != -1; + return response.storeException() != null || response.version() != -1 || response.allocationId() != null; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 669d71477ca..93961bf1fbb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -621,7 +621,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild public int numberOfReplicas() { return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1); } - + public Builder creationDate(long creationDate) { settings = settingsBuilder().put(settings).put(SETTING_CREATION_DATE, creationDate).build(); return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 1fa1b702f66..b38e99d4493 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -47,7 +47,6 @@ import org.elasticsearch.rest.RestStatus; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Locale; /** * Service responsible for submitting 
open/close index requests @@ -92,14 +91,6 @@ public class MetaDataIndexStateService extends AbstractComponent { } if (indexMetaData.getState() != IndexMetaData.State.CLOSE) { - IndexRoutingTable indexRoutingTable = currentState.routingTable().index(index); - for (IndexShardRoutingTable shard : indexRoutingTable) { - for (ShardRouting shardRouting : shard) { - if (shardRouting.primary() == true && shardRouting.allocatedPostIndexCreate() == false) { - throw new IndexPrimaryShardNotAllocatedException(new Index(index)); - } - } - } indicesToClose.add(index); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 8dd71e3fba5..5ffaee0f2f9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -267,7 +269,7 @@ public final class ShardRouting implements Streamable, ToXContent { return shardIdentifier; } - public boolean allocatedPostIndexCreate() { + public boolean allocatedPostIndexCreate(IndexMetaData indexMetaData) { if (active()) { return true; } @@ -279,6 +281,11 @@ public final class ShardRouting implements Streamable, ToXContent { return false; } + if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_3_0_0)) { + // when no shards with this id have ever been active for this index + return false; + } + return true; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index a02c72c5745..0df4959a5c5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -22,13 +22,13 @@ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.hppc.ObjectLookupContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -360,7 +360,8 @@ public class DiskThresholdDecider extends AllocationDecider { } // a flag for whether the primary shard has been previously allocated - boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(); + IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData); // checks for exact byte comparisons if (freeBytes 
< freeBytesThresholdLow.bytes()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 0bbd4935044..a34cd33f7a1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -82,8 +83,8 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).getSettings(); - String enableIndexValue = indexSettings.get(INDEX_ROUTING_ALLOCATION_ENABLE); + IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + String enableIndexValue = indexMetaData.getSettings().get(INDEX_ROUTING_ALLOCATION_ENABLE); final Allocation enable; if (enableIndexValue != null) { enable = Allocation.parse(enableIndexValue); @@ -96,7 +97,7 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe case NONE: return allocation.decision(Decision.NO, NAME, "no allocations are allowed"); case NEW_PRIMARIES: - if (shardRouting.primary() && shardRouting.allocatedPostIndexCreate() == false) { + if (shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { return allocation.decision(Decision.YES, NAME, "new primary allocations are allowed"); } else { return allocation.decision(Decision.NO, NAME, "non-new primary allocations are forbidden"); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index e560b4458b7..79bfbdac8c2 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -20,6 +20,7 @@ package org.elasticsearch.gateway; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -30,8 +31,10 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import java.util.*; +import java.util.stream.Collectors; /** * The primary shard allocator allocates primary shard that were not created as @@ -39,6 +42,7 @@ import java.util.*; */ public abstract class PrimaryShardAllocator extends AbstractComponent { + @Deprecated public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards"; private final String initialShards; @@ -56,13 +60,21 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { final 
RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); while (unassignedIterator.hasNext()) { - ShardRouting shard = unassignedIterator.next(); + final ShardRouting shard = unassignedIterator.next(); - if (needToFindPrimaryCopy(shard) == false) { + if (shard.primary() == false) { continue; } - AsyncShardFetch.FetchResult shardState = fetchData(shard, allocation); + final IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings, Collections.emptyList()); + + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { + // when we create a fresh index + continue; + } + + final AsyncShardFetch.FetchResult shardState = fetchData(shard, allocation); if (shardState.hasData() == false) { logger.trace("{}: ignoring allocation, still fetching shard started state", shard); allocation.setHasPendingAsyncFetch(); @@ -70,25 +82,50 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { continue; } - IndexMetaData indexMetaData = metaData.index(shard.getIndex()); - Settings indexSettings = Settings.builder().put(settings).put(indexMetaData.getSettings()).build(); + final Set lastActiveAllocationIds = indexMetaData.activeAllocationIds(shard.id()); + final boolean snapshotRestore = shard.restoreSource() != null; + final boolean recoverOnAnyNode = recoverOnAnyNode(indexSettings); - NodesAndVersions nodesAndVersions = buildNodesAndVersions(shard, recoverOnAnyNode(indexSettings), allocation.getIgnoreNodes(shard.shardId()), shardState); - logger.debug("[{}][{}] found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion); + final NodesAndVersions nodesAndVersions; + final boolean enoughAllocationsFound; - if (isEnoughAllocationsFound(shard, indexMetaData, nodesAndVersions) == false) { - // if we are restoring this shard we still can allocate - if (shard.restoreSource() == null) { + if (lastActiveAllocationIds.isEmpty()) { + assert indexSettings.getIndexVersionCreated().before(Version.V_3_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new"; + // when we load an old index (after upgrading cluster) or restore a snapshot of an old index + // fall back to old version-based allocation mode + // Note that once the shard has been active, lastActiveAllocationIds will be non-empty + nodesAndVersions = buildNodesAndVersions(shard, snapshotRestore || recoverOnAnyNode, allocation.getIgnoreNodes(shard.shardId()), shardState); + if (snapshotRestore || recoverOnAnyNode) { + enoughAllocationsFound = nodesAndVersions.allocationsFound > 0; + } else { + enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(shard, indexMetaData, nodesAndVersions); + } + logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), Version.V_3_0_0, nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion); + } else { + assert lastActiveAllocationIds.isEmpty() == false; + // use allocation ids to select nodes + nodesAndVersions = buildAllocationIdBasedNodes(shard, snapshotRestore || recoverOnAnyNode, + allocation.getIgnoreNodes(shard.shardId()), lastActiveAllocationIds, shardState); + enoughAllocationsFound = nodesAndVersions.allocationsFound > 0; + logger.debug("[{}][{}]: found {} allocations of {} based on allocation ids: [{}]", shard.index(), shard.id(), 
nodesAndVersions.allocationsFound, shard, lastActiveAllocationIds); + } + + if (enoughAllocationsFound == false){ + if (snapshotRestore) { + // let BalancedShardsAllocator take care of allocating this shard + logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource()); + } else if (recoverOnAnyNode) { + // let BalancedShardsAllocator take care of allocating this shard + logger.debug("[{}][{}]: missing local data, recover from any node", shard.index(), shard.id()); + } else { // we can't really allocate, so ignore it and continue unassignedIterator.removeAndIgnore(); logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound); - } else { - logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource()); } continue; } - NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions); + final NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions.nodes); if (nodesToAllocate.yesNodes.isEmpty() == false) { DiscoveryNode node = nodesToAllocate.yesNodes.get(0); logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node); @@ -109,63 +146,99 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } /** - * Does the shard need to find a primary copy? + * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have an allocation id matching + * lastActiveAllocationIds are added to the list. Otherwise, any node that has a shard is added to the list, but + * entries with matching allocation id are always at the front of the list. */ - boolean needToFindPrimaryCopy(ShardRouting shard) { - if (shard.primary() == false) { - return false; + protected NodesAndVersions buildAllocationIdBasedNodes(ShardRouting shard, boolean matchAnyShard, Set ignoreNodes, + Set lastActiveAllocationIds, AsyncShardFetch.FetchResult shardState) { + List matchingNodes = new ArrayList<>(); + List nonMatchingNodes = new ArrayList<>(); + long highestVersion = -1; + for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) { + DiscoveryNode node = nodeShardState.getNode(); + String allocationId = nodeShardState.allocationId(); + + if (ignoreNodes.contains(node.id())) { + continue; + } + + if (nodeShardState.storeException() == null) { + if (allocationId == null && nodeShardState.version() != -1) { + // old shard with no allocation id, assign dummy value so that it gets added below in case of matchAnyShard + allocationId = "_n/a_"; + } + + logger.trace("[{}] on node [{}] has allocation id [{}] of shard", shard, nodeShardState.getNode(), allocationId); + } else { + logger.trace("[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id", nodeShardState.storeException(), shard, nodeShardState.getNode(), allocationId); + allocationId = null; + } + + if (allocationId != null) { + if (lastActiveAllocationIds.contains(allocationId)) { + matchingNodes.add(node); + highestVersion = Math.max(highestVersion, nodeShardState.version()); + } else if (matchAnyShard) { + nonMatchingNodes.add(node); + highestVersion = Math.max(highestVersion, nodeShardState.version()); + } + } } - // this is an API allocation, ignore since we know there is no data... 
- if (shard.allocatedPostIndexCreate() == false) { - return false; - } + List nodes = new ArrayList<>(); + nodes.addAll(matchingNodes); + nodes.addAll(nonMatchingNodes); - return true; + if (logger.isTraceEnabled()) { + logger.trace("{} candidates for allocation: {}", shard, nodes.stream().map(DiscoveryNode::name).collect(Collectors.joining(", "))); + } + return new NodesAndVersions(nodes, nodes.size(), highestVersion); } - private boolean isEnoughAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) { + /** + * used by old version-based allocation + */ + private boolean isEnoughVersionBasedAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) { // check if the counts meets the minimum set int requiredAllocation = 1; // if we restore from a repository one copy is more then enough - if (shard.restoreSource() == null) { - try { - String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards)); - if ("quorum".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; - } - } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 2) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); - } - } else if ("one".equals(initialShards)) { - requiredAllocation = 1; - } else if ("full".equals(initialShards) || "all".equals(initialShards)) { - requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; - } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = indexMetaData.getNumberOfReplicas(); - } - } else { - requiredAllocation = Integer.parseInt(initialShards); + try { + String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards)); + if ("quorum".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; } - } catch (Exception e) { - logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard); + } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 2) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); + } + } else if ("one".equals(initialShards)) { + requiredAllocation = 1; + } else if ("full".equals(initialShards) || "all".equals(initialShards)) { + requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; + } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = indexMetaData.getNumberOfReplicas(); + } + } else { + requiredAllocation = Integer.parseInt(initialShards); } + } catch (Exception e) { + logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard); } return nodesAndVersions.allocationsFound >= requiredAllocation; } /** - * Based on the nodes and versions, build the list of yes/no/throttle nodes that the shard applies to. 
+ * Split the list of nodes to lists of yes/no/throttle nodes based on allocation deciders */ - private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, NodesAndVersions nodesAndVersions) { + private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, List nodes) { List yesNodes = new ArrayList<>(); List throttledNodes = new ArrayList<>(); List noNodes = new ArrayList<>(); - for (DiscoveryNode discoNode : nodesAndVersions.nodes) { + for (DiscoveryNode discoNode : nodes) { RoutingNode node = allocation.routingNodes().node(discoNode.id()); if (node == null) { continue; @@ -184,9 +257,11 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } /** - * Builds a list of nodes and version + * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have the highest shard version + * are added to the list. Otherwise, any node that has a shard is added to the list, but entries with highest + * version are always at the front of the list. */ - NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean recoveryOnAnyNode, Set ignoreNodes, + NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean matchAnyShard, Set ignoreNodes, AsyncShardFetch.FetchResult shardState) { final Map nodesWithVersion = new HashMap<>(); int numberOfAllocationsFound = 0; @@ -208,20 +283,15 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { version = -1; } - if (recoveryOnAnyNode) { - numberOfAllocationsFound++; - if (version > highestVersion) { - highestVersion = version; - } - // We always put the node without clearing the map - nodesWithVersion.put(node, version); - } else if (version != -1) { + if (version != -1) { numberOfAllocationsFound++; // If we've found a new "best" candidate, clear the // current candidates and add it if (version > highestVersion) { highestVersion = version; - nodesWithVersion.clear(); + if (matchAnyShard == false) { + nodesWithVersion.clear(); + } nodesWithVersion.put(node, version); } else if (version == highestVersion) { // If the candidate is the same, add it to the @@ -258,9 +328,9 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { * Return {@code true} if the index is configured to allow shards to be * recovered on any node */ - private boolean recoverOnAnyNode(Settings idxSettings) { - return IndexMetaData.isOnSharedFilesystem(idxSettings) && - idxSettings.getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); + private boolean recoverOnAnyNode(IndexSettings indexSettings) { + return indexSettings.isOnSharedFilesystem() + && indexSettings.getSettings().getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); } protected abstract AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation); diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index c87f4d94755..0b5f2bc58d9 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -24,6 +24,8 @@ import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectLongCursor; import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -56,6 +58,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { */ public boolean processExistingRecoveries(RoutingAllocation allocation) { boolean changed = false; + MetaData metaData = allocation.metaData(); for (RoutingNodes.RoutingNodesIterator nodes = allocation.routingNodes().nodes(); nodes.hasNext(); ) { nodes.next(); for (RoutingNodes.RoutingNodeIterator it = nodes.nodeShards(); it.hasNext(); ) { @@ -69,8 +72,10 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { if (shard.relocatingNodeId() != null) { continue; } + // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - if (shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } @@ -114,6 +119,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { boolean changed = false; final RoutingNodes routingNodes = allocation.routingNodes(); final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); + MetaData metaData = allocation.metaData(); while (unassignedIterator.hasNext()) { ShardRouting shard = unassignedIterator.next(); if (shard.primary()) { @@ -121,7 +127,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - if (shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index d91b4bd8cdd..539ac924262 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -139,7 +139,8 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction Store.tryOpenIndex(shardPath.resolveIndex()); } catch (Exception exception) { logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId, shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : ""); - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, exception); + String allocationId = shardStateMetaData.allocationId != null ? 
shardStateMetaData.allocationId.getId() : null; + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, exception); } } // old shard metadata doesn't have the actual index UUID so we need to check if the actual uuid in the metadata @@ -149,11 +150,12 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction logger.warn("{} shard state info found but indexUUID didn't match expected [{}] actual [{}]", shardId, indexUUID, shardStateMetaData.indexUUID); } else { logger.debug("{} shard state info found: [{}]", shardId, shardStateMetaData); - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version); + String allocationId = shardStateMetaData.allocationId != null ? shardStateMetaData.allocationId.getId() : null; + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId); } } logger.trace("{} no local shard info found", shardId); - return new NodeGatewayStartedShards(clusterService.localNode(), -1); + return new NodeGatewayStartedShards(clusterService.localNode(), -1, null); } catch (Exception e) { throw new ElasticsearchException("failed to load started shards", e); } @@ -277,17 +279,19 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction public static class NodeGatewayStartedShards extends BaseNodeResponse { private long version = -1; + private String allocationId = null; private Throwable storeException = null; public NodeGatewayStartedShards() { } - public NodeGatewayStartedShards(DiscoveryNode node, long version) { - this(node, version, null); + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId) { + this(node, version, allocationId, null); } - public NodeGatewayStartedShards(DiscoveryNode node, long version, Throwable storeException) { + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, Throwable storeException) { super(node); this.version = version; + this.allocationId = allocationId; this.storeException = storeException; } @@ -295,6 +299,10 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction return this.version; } + public String allocationId() { + return this.allocationId; + } + public Throwable storeException() { return this.storeException; } @@ -303,16 +311,17 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction public void readFrom(StreamInput in) throws IOException { super.readFrom(in); version = in.readLong(); + allocationId = in.readOptionalString(); if (in.readBoolean()) { storeException = in.readThrowable(); } - } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeLong(version); + out.writeOptionalString(allocationId); if (storeException != null) { out.writeBoolean(true); out.writeThrowable(storeException); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index c0bf9244673..03c0611d172 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1099,7 +1099,8 @@ public class IndexShard extends AbstractIndexShardComponent { // we are the first primary, recover from the gateway // if its post api allocation, the index should exists assert shardRouting.primary() : "recover from store only makes sense if the shard is a primary 
shard"; - final boolean shouldExist = shardRouting.allocatedPostIndexCreate(); + boolean shouldExist = shardRouting.allocatedPostIndexCreate(idxSettings.getIndexMetaData()); + StoreRecovery storeRecovery = new StoreRecovery(shardId, logger); return storeRecovery.recoverFromStore(this, shouldExist, localNode); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index bb154218215..3ce9e99f4dc 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -285,4 +285,11 @@ public class CreateIndexIT extends ESIntegTestCase { assertThat(messages.toString(), containsString("mapper [text] is used by multiple types")); } } + + public void testRestartIndexCreationAfterFullClusterRestart() throws Exception { + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("cluster.routing.allocation.enable", "none")).get(); + client().admin().indices().prepareCreate("test").setSettings(indexSettings()).get(); + internalCluster().fullRestart(); + ensureGreen("test"); + } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index ffb9e630b70..ebd32ccb482 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -87,6 +87,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { for (ObjectCursor> shardStoreStatuses : shardStores.values()) { for (IndicesShardStoresResponse.StoreStatus storeStatus : shardStoreStatuses.value) { assertThat(storeStatus.getVersion(), greaterThan(-1l)); + assertThat(storeStatus.getAllocationId(), notNullValue()); assertThat(storeStatus.getNode(), notNullValue()); assertThat(storeStatus.getStoreException(), nullValue()); } @@ -108,7 +109,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { assertThat(shardStoresStatuses.size(), equalTo(unassignedShards.size())); for (IntObjectCursor> storesStatus : shardStoresStatuses) { assertThat("must report for one store", storesStatus.value.size(), equalTo(1)); - assertThat("reported store should be primary", storesStatus.value.get(0).getAllocation(), equalTo(IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY)); + assertThat("reported store should be primary", storesStatus.value.get(0).getAllocationStatus(), equalTo(IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY)); } logger.info("--> enable allocation"); enableAllocation(index); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index cf197a27faf..6d1159c82a5 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.shards; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.Version; import 
org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -44,9 +45,9 @@ public class IndicesShardStoreResponseTests extends ESTestCase { DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT); DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, Version.CURRENT); List storeStatusList = new ArrayList<>(); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, 2, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, null)); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED, new IOException("corrupted"))); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, null, IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, 2, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED, new IOException("corrupted"))); storeStatuses.put(0, storeStatusList); storeStatuses.put(1, storeStatusList); ImmutableOpenIntMap> storesMap = storeStatuses.build(); @@ -89,8 +90,10 @@ public class IndicesShardStoreResponseTests extends ESTestCase { IndicesShardStoresResponse.StoreStatus storeStatus = storeStatusList.get(i); assertThat(storeInfo.containsKey("version"), equalTo(true)); assertThat(((int) storeInfo.get("version")), equalTo(((int) storeStatus.getVersion()))); + assertThat(storeInfo.containsKey("allocation_id"), equalTo(true)); + assertThat(((String) storeInfo.get("allocation_id")), equalTo((storeStatus.getAllocationId()))); assertThat(storeInfo.containsKey("allocation"), equalTo(true)); - assertThat(((String) storeInfo.get("allocation")), equalTo(storeStatus.getAllocation().value())); + assertThat(((String) storeInfo.get("allocation")), equalTo(storeStatus.getAllocationStatus().value())); assertThat(storeInfo.containsKey(storeStatus.getNode().id()), equalTo(true)); if (storeStatus.getStoreException() != null) { assertThat(storeInfo.containsKey("store_exception"), equalTo(true)); @@ -104,11 +107,11 @@ public class IndicesShardStoreResponseTests extends ESTestCase { public void testStoreStatusOrdering() throws Exception { DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT); List orderedStoreStatuses = new ArrayList<>(); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 2, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED, null)); - orderedStoreStatuses.add(new 
IndicesShardStoresResponse.StoreStatus(node1, 3, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, new IOException("corrupted"))); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 2, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, new IOException("corrupted"))); List storeStatuses = new ArrayList<>(orderedStoreStatuses); Collections.shuffle(storeStatuses, random()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java new file mode 100644 index 00000000000..dcd35303b75 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -0,0 +1,103 @@ +package org.elasticsearch.cluster.routing; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.disruption.NetworkDisconnectPartition; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@ESIntegTestCase.SuppressLocalMode +public class PrimaryAllocationIT extends ESIntegTestCase { + + public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + logger.info("--> starting 3 nodes, 1 master, 2 data"); + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + internalCluster().startDataOnlyNodesAsync(2).get(); + + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1).put("index.number_of_replicas", 1)).get()); + ensureGreen(); + logger.info("--> indexing..."); + client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + refresh(); + + ClusterState state = client().admin().cluster().prepareState().all().get().getState(); + List shards = state.routingTable().allShards("test"); + assertThat(shards.size(), equalTo(2)); + + final String primaryNode; + final String replicaNode; + if (shards.get(0).primary()) { + primaryNode = state.getRoutingNodes().node(shards.get(0).currentNodeId()).node().name(); + 
replicaNode = state.getRoutingNodes().node(shards.get(1).currentNodeId()).node().name(); + } else { + primaryNode = state.getRoutingNodes().node(shards.get(1).currentNodeId()).node().name(); + replicaNode = state.getRoutingNodes().node(shards.get(0).currentNodeId()).node().name(); + } + + NetworkDisconnectPartition partition = new NetworkDisconnectPartition( + new HashSet<>(Arrays.asList(master, replicaNode)), Collections.singleton(primaryNode), random()); + internalCluster().setDisruptionScheme(partition); + logger.info("--> partitioning node with primary shard from rest of cluster"); + partition.startDisrupting(); + + ensureStableCluster(2, master); + + logger.info("--> index a document into previous replica shard (that is now primary)"); + client(replicaNode).prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + + logger.info("--> shut down node that has new acknowledged document"); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode)); + + ensureStableCluster(1, master); + + partition.stopDisrupting(); + + logger.info("--> waiting for node with old primary shard to rejoin the cluster"); + ensureStableCluster(2, master); + + logger.info("--> check that old primary shard does not get promoted to primary again"); + // kick reroute and wait for all shard states to be fetched + client(master).admin().cluster().prepareReroute().get(); + assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0))); + // kick reroute a second time and check that all shards are unassigned + assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); + + logger.info("--> starting node that reuses data folder with the up-to-date primary shard"); + internalCluster().startDataOnlyNode(Settings.EMPTY); + + logger.info("--> check that the up-to-date primary shard gets promoted and that documents are available"); + ensureYellow("test"); + assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); + } + + public void testNotWaitForQuorumCopies() throws Exception { + logger.info("--> starting 3 nodes"); + internalCluster().startNodesAsync(3).get(); + logger.info("--> creating index with 1 primary and 2 replicas"); + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", randomIntBetween(1, 3)).put("index.number_of_replicas", 2)).get()); + ensureGreen("test"); + client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + logger.info("--> removing 2 nodes from cluster"); + internalCluster().stopRandomDataNode(); + internalCluster().stopRandomDataNode(); + internalCluster().fullRestart(); + logger.info("--> checking that index still gets allocated with only 1 shard copy being available"); + ensureYellow("test"); + assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 1l); + } +} diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 73cbb51faed..193985a1c68 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.index.shard.ShardId; 
import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; -import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -59,25 +59,29 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { this.testAllocator = new TestAllocator(); } - /** - * Verifies that the canProcess method of primary allocation behaves correctly - * and processes only the applicable shard. - */ - public void testNoProcessReplica() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - assertThat(testAllocator.needToFindPrimaryCopy(shard), equalTo(false)); - } - - public void testNoProcessPrimayNotAllcoatedBefore() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, true, ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); - assertThat(testAllocator.needToFindPrimaryCopy(shard), equalTo(false)); + public void testNoProcessPrimaryNotAllocatedBefore() { + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), randomBoolean(), Version.CURRENT); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), true, Version.V_2_1_0); + } + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + assertThat(allocation.routingNodes().unassigned().iterator().next().shardId(), equalTo(shardId)); } /** * Tests that when async fetch returns that there is no data, the shard will not be allocated. */ public void testNoAsyncFetchData() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "allocId"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -85,11 +89,17 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests when the node returns that no data was found for it (-1), it will be moved to ignore unassigned. + * Tests when the node returns that no data was found for it (-1 for version and null for allocation id), + * it will be moved to ignore unassigned. 
*/ public void testNoAllocationFound() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, -1); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "allocId"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); + } + testAllocator.addData(node1, -1, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -97,11 +107,43 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests when the node returns that no data was found for it (-1), it will be moved to ignore unassigned. + * Tests when the node returns data with a shard allocation id that does not match active allocation ids, it will be moved to ignore unassigned. + */ + public void testNoMatchingAllocationIdFound() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "id2"); + testAllocator.addData(node1, 1, "id1"); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); + assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + } + + /** + * Tests that when there is a node to allocate the shard to, and there are no active allocation ids, it will be allocated to it. + * This is the case when we have old shards from pre-3.0 days. + */ + public void testNoActiveAllocationIds() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); + testAllocator.addData(node1, 1, null); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node1.id())); + } + + /** + * Tests when the node returns that no data was found for it, it will be moved to ignore unassigned. 
*/ public void testStoreException() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 3, new CorruptIndexException("test", "test")); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1", new CorruptIndexException("test", "test")); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); + testAllocator.addData(node1, 3, null, new CorruptIndexException("test", "test")); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -112,8 +154,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * Tests that when there is a node to allocate the shard to, it will be allocated to it. */ public void testFoundAllocationAndAllocating() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_2_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -126,8 +174,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * it will be moved to ignore unassigned until it can be allocated to. */ public void testFoundAllocationButThrottlingDecider() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, Version.V_2_2_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -139,8 +193,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * force the allocation to it. 
*/ public void testFoundAllocationButNoDecider() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, Version.V_2_0_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -149,11 +209,11 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests that the highest version node is chosed for allocation. + * Tests that the highest version node is chosen for allocation. */ - public void testAllocateToTheHighestVersion() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 10).addData(node2, 12); + public void testAllocateToTheHighestVersionOnLegacyIndex() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_0_0); + testAllocator.addData(node1, 10, null).addData(node2, 12, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -162,35 +222,150 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests that when restoring from snapshot, even if we didn't find any node to allocate on, the shard - * will remain in the unassigned list to be allocated later. + * Tests that when restoring from a snapshot and we find a node with a shard copy and allocation + * deciders say yes, we allocate to that node. 
*/ - public void testRestoreIgnoresNoNodesToAllocate() { - MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex())) - .build(); - ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) - .metaData(metaData) - .routingTable(routingTable) - .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), state.getRoutingNodes(), state.nodes(), null, System.nanoTime()); + public void testRestore() { + RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } - testAllocator.addData(node1, -1).addData(node2, -1); + /** + * Tests that when restoring from a snapshot and we find a node with a shard copy and allocation + * deciders say throttle, we add it to ignored shards. + */ + public void testRestoreThrottle() { + RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + } + + /** + * Tests that when restoring from a snapshot and we find a node with a shard copy but allocation + * deciders say no, we still allocate to that node. + */ + public void testRestoreForcesAllocateIfShardAvailable() { + RoutingAllocation allocation = getRestoreRoutingAllocation(noAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "some allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when restoring from a snapshot and we don't find a node with a shard copy, the shard will remain in + * the unassigned list to be allocated later. 
+ */ + public void testRestoreDoesNotAssignIfNoShardAvailable() { + RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, -1, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + } + + private RoutingAllocation getRestoreRoutingAllocation(AllocationDeciders allocationDeciders) { + Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0) + .putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndex())) + .build(); + ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy and allocation + * deciders say yes, we allocate to that node. + */ + public void testRecoverOnAnyNode() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy and allocation + * deciders say throttle, we add it to ignored shards. + */ + public void testRecoverOnAnyNodeThrottle() { + RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy but allocation + * deciders say no, we still allocate to that node. 
+ */ + public void testRecoverOnAnyNodeForcesAllocateIfShardAvailable() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(noAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we don't find a node with a shard copy we let + * BalancedShardAllocator assign the shard + */ + public void testRecoverOnAnyNodeDoesNotAssignIfNoShardAvailable() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, -1, null); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + } + + private RoutingAllocation getRecoverOnAnyNodeRoutingAllocation(AllocationDeciders allocationDeciders) { + Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version) + .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) + .put(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, true)) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex())) + .build(); + ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); } /** * Tests that only when enough copies of the shard exists we are going to allocate it. This test * verifies that with same version (1), and quorum allocation. 
*/ - public void testEnoughCopiesFoundForAllocation() { + public void testEnoughCopiesFoundForAllocationOnLegacyIndex() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -207,7 +382,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1); + testAllocator.addData(node1, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -215,7 +390,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node2, 1); + testAllocator.addData(node2, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -229,9 +404,9 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * Tests that only when enough copies of the shard exists we are going to allocate it. This test * verifies that even with different version, we treat different versions as a copy, and count them. 
*/ - public void testEnoughCopiesFoundForAllocationWithDifferentVersion() { + public void testEnoughCopiesFoundForAllocationOnLegacyIndexWithDifferentVersion() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -248,7 +423,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1); + testAllocator.addData(node1, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -256,7 +431,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node2, 2); + testAllocator.addData(node2, 2, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -266,67 +441,20 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node2.id())); } - public void testAllocationOnAnyNodeWithSharedFs() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, - ShardRoutingState.UNASSIGNED, 0, - new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - - Map data = new HashMap<>(); - data.put(node1, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node1, 1)); - data.put(node2, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node2, 5)); - data.put(node3, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node3, -1)); - AsyncShardFetch.FetchResult fetches = - new AsyncShardFetch.FetchResult(shardId, data, new HashSet<>(), new HashSet<>()); - - PrimaryShardAllocator.NodesAndVersions nAndV = testAllocator.buildNodesAndVersions(shard, false, new HashSet(), fetches); - assertThat(nAndV.allocationsFound, equalTo(2)); - assertThat(nAndV.highestVersion, equalTo(5L)); - assertThat(nAndV.nodes, contains(node2)); - - nAndV = testAllocator.buildNodesAndVersions(shard, true, new HashSet(), fetches); - assertThat(nAndV.allocationsFound, equalTo(3)); - assertThat(nAndV.highestVersion, equalTo(5L)); - // All three nodes are potential candidates because shards can be recovered on any node - assertThat(nAndV.nodes, contains(node2, node1, node3)); - } - - public void testAllocationOnAnyNodeShouldPutNodesWithExceptionsLast() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, - ShardRoutingState.UNASSIGNED, 0, - new 
UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - - Map data = new HashMap<>(); - data.put(node1, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node1, 1)); - data.put(node2, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node2, 1)); - data.put(node3, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node3, 1, new IOException("I failed to open"))); - HashSet ignoredNodes = new HashSet<>(); - ignoredNodes.add(node2.id()); - AsyncShardFetch.FetchResult fetches = - new AsyncShardFetch.FetchResult(shardId, data, new HashSet<>(), ignoredNodes); - - PrimaryShardAllocator.NodesAndVersions nAndV = testAllocator.buildNodesAndVersions(shard, false, ignoredNodes, fetches); - assertThat(nAndV.allocationsFound, equalTo(1)); - assertThat(nAndV.highestVersion, equalTo(1L)); - assertThat(nAndV.nodes, contains(node1)); - - nAndV = testAllocator.buildNodesAndVersions(shard, true, ignoredNodes, fetches); - assertThat(nAndV.allocationsFound, equalTo(2)); - assertThat(nAndV.highestVersion, equalTo(1L)); - // node3 should be last here - assertThat(nAndV.nodes.size(), equalTo(2)); - assertThat(nAndV.nodes, contains(node1, node3)); - } - - private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders) { + private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... activeAllocationIds) { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsRecovery(metaData.index(shardId.getIndex())) - .build(); + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, new HashSet<>(Arrays.asList(activeAllocationIds)))) + .build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + if (asNew) { + routingTableBuilder.addAsNew(metaData.index(shardId.getIndex())); + } else { + routingTableBuilder.addAsRecovery(metaData.index(shardId.getIndex())); + } ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) .metaData(metaData) - .routingTable(routingTable) + .routingTable(routingTableBuilder.build()) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); return new RoutingAllocation(deciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); } @@ -344,15 +472,15 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return this; } - public TestAllocator addData(DiscoveryNode node, long version) { - return addData(node, version, null); + public TestAllocator addData(DiscoveryNode node, long version, String allocationId) { + return addData(node, version, allocationId, null); } - public TestAllocator addData(DiscoveryNode node, long version, @Nullable Throwable storeException) { + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, @Nullable Throwable storeException) { if (data == null) { data = new HashMap<>(); } - data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, storeException)); + data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, storeException)); return this; } diff --git 
a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index edde1720474..69c518eb9c6 100644 --- a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -20,10 +20,10 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -32,14 +32,10 @@ import org.elasticsearch.test.InternalTestCluster.RestartCallback; import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.Requests.clusterHealthRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; /** * @@ -51,72 +47,12 @@ public class QuorumGatewayIT extends ESIntegTestCase { return 2; } - public void testChangeInitialShardsRecovery() throws Exception { - logger.info("--> starting 3 nodes"); - final String[] nodes = internalCluster().startNodesAsync(3).get().toArray(new String[0]); - - createIndex("test"); - ensureGreen(); - NumShards test = getNumShards("test"); - - logger.info("--> indexing..."); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); - //We don't check for failures in the flush response: if we do we might get the following: - // FlushNotAllowedEngineException[[test][1] recovery is in progress, flush [COMMIT_TRANSLOG] is not allowed] - flush(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); - refresh(); - - for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); - } - - final String nodeToRemove = nodes[between(0,2)]; - logger.info("--> restarting 1 nodes -- kill 2"); - internalCluster().fullRestart(new RestartCallback() { - @Override - public Settings onNodeStopped(String nodeName) throws Exception { - return Settings.EMPTY; - } - - @Override - public boolean doRestart(String nodeName) { - return nodeToRemove.equals(nodeName); - } - }); - if (randomBoolean()) { - Thread.sleep(between(1, 400)); // wait a bit and give is a chance to try to allocate - } - ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForNodes("1")).actionGet(); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.RED)); // nothing allocated yet - assertTrue(awaitBusy(() -> { - 
ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get(); - return clusterStateResponse.getState() != null && clusterStateResponse.getState().routingTable().index("test") != null; - })); // wait until we get a cluster state - could be null if we quick enough. - final ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get(); - assertThat(clusterStateResponse.getState(), notNullValue()); - assertThat(clusterStateResponse.getState().routingTable().index("test"), notNullValue()); - assertThat(clusterStateResponse.getState().routingTable().index("test").allPrimaryShardsActive(), is(false)); - logger.info("--> change the recovery.initial_shards setting, and make sure its recovered"); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("recovery.initial_shards", 1)).get(); - - logger.info("--> running cluster_health (wait for the shards to startup), primaries only since we only have 1 node"); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(test.numPrimaries)).actionGet(); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); - - for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); - } - } - public void testQuorumRecovery() throws Exception { logger.info("--> starting 3 nodes"); - internalCluster().startNodesAsync(3).get(); // we are shutting down nodes - make sure we don't have 2 clusters if we test network - setMinimumMasterNodes(2); + internalCluster().startNodesAsync(3, + Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2).build()).get(); + createIndex("test"); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 9a053b36527..0818999ea7e 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -43,9 +43,11 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -275,13 +277,16 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { } private RoutingAllocation onePrimaryOnNode1And1Replica(AllocationDeciders deciders, Settings settings, UnassignedInfo.Reason reason) { + ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)).numberOfShards(1).numberOfReplicas(0)) - .build(); + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)) + .numberOfShards(1).numberOfReplicas(1) + .putActiveAllocationIds(0, new 
HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) + .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) .addIndexShard(new IndexShardRoutingTable.Builder(shardId) - .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(primaryShard) .addShard(ShardRouting.newUnassigned(shardId.getIndex(), shardId.getId(), null, false, new UnassignedInfo(reason, null))) .build()) ) @@ -294,13 +299,16 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { } private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) { + ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(1) + .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) .addIndexShard(new IndexShardRoutingTable.Builder(shardId) - .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(primaryShard) .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node2.id(), null, null, false, ShardRoutingState.INITIALIZING, 10, new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null))) .build()) ) diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index b5f33afa94c..8dce7f1f954 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -133,7 +133,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardId id = new ShardId("foo", 1); long version = between(1, Integer.MAX_VALUE / 2); boolean primary = randomBoolean(); - AllocationId allocationId = randomAllocationId(); + AllocationId allocationId = randomBoolean() ? null : randomAllocationId(); ShardStateMetaData state1 = new ShardStateMetaData(version, primary, "foo", allocationId); write(state1, env.availableShardPaths(id)); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(id)); @@ -288,7 +288,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testShardStateMetaHashCodeEquals() { - ShardStateMetaData meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomAllocationId()); + AllocationId allocationId = randomBoolean() ? 
null : randomAllocationId(); + ShardStateMetaData meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); assertEquals(meta, new ShardStateMetaData(meta.version, meta.primary, meta.indexUUID, meta.allocationId)); assertEquals(meta.hashCode(), new ShardStateMetaData(meta.version, meta.primary, meta.indexUUID, meta.allocationId).hashCode()); @@ -299,7 +300,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertFalse(meta.equals(new ShardStateMetaData(meta.version, !meta.primary, meta.indexUUID + "foo", randomAllocationId()))); Set hashCodes = new HashSet<>(); for (int i = 0; i < 30; i++) { // just a sanity check that we impl hashcode - meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomAllocationId()); + allocationId = randomBoolean() ? null : randomAllocationId(); + meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); hashCodes.add(meta.hashCode()); } assertTrue("more than one unique hashcode expected but got: " + hashCodes.size(), hashCodes.size() > 1); diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index a5cfa816455..af9cfeb94c1 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -97,7 +97,7 @@ public class SimpleIndexStateIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); } - public void testFastCloseAfterCreateDoesNotClose() { + public void testFastCloseAfterCreateContinuesCreateAfterOpen() { logger.info("--> creating test index that cannot be allocated"); client().admin().indices().prepareCreate("test").setSettings(Settings.settingsBuilder() .put("index.routing.allocation.include.tag", "no_such_node").build()).get(); @@ -106,17 +106,14 @@ public class SimpleIndexStateIT extends ESIntegTestCase { assertThat(health.isTimedOut(), equalTo(false)); assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); - try { - client().admin().indices().prepareClose("test").get(); - fail("Exception should have been thrown"); - } catch(IndexPrimaryShardNotAllocatedException e) { - // expected - } + client().admin().indices().prepareClose("test").get(); logger.info("--> updating test index settings to allow allocation"); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.settingsBuilder() .put("index.routing.allocation.include.tag", "").build()).get(); + client().admin().indices().prepareOpen("test").get(); + logger.info("--> waiting for green status"); ensureGreen(); diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index a2b73a44842..56e9d4ddb91 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -129,19 +129,6 @@ specific index module: experimental[] Disables the purge of <> on the current index. -[[index.recovery.initial_shards]]`index.recovery.initial_shards`:: -+ --- -A primary shard is only recovered only if there are enough nodes available to -allocate sufficient replicas to form a quorum. It can be set to: - - * `quorum` (default) - * `quorum-1` (or `half`) - * `full` - * `full-1`. - * Number values are also supported, e.g. `1`. 
--- - [float] === Settings in other index modules diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index da74a651242..0d589adb64a 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -104,9 +104,8 @@ settings API: `index.shared_filesystem.recover_on_any_node`:: Boolean value indicating whether the primary shards for the index should be - allowed to recover on any node in the cluster, regardless of the number of - replicas or whether the node has previously had the shard allocated to it - before. Defaults to `false`. + allowed to recover on any node in the cluster. If a node holding a copy of + the shard is found, recovery prefers that node. Defaults to `false`. === Node level settings related to shadow replicas diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc index d4d385bd6dc..19acbc44d3f 100644 --- a/docs/reference/indices/shard-stores.asciidoc +++ b/docs/reference/indices/shard-stores.asciidoc @@ -52,8 +52,9 @@ The shard stores information is grouped by indices and shard ids. } }, "version": 4, <4> + "allocation_id": "2iNySv_OQVePRX-yaRH_lQ", <5> "allocation" : "primary" | "replica" | "unused", <6> - "store_exception": ... <5> + "store_exception": ... <7> }, ... ] @@ -66,7 +67,8 @@ The shard stores information is grouped by indices and shard ids. <3> The node information that hosts a copy of the store, the key is the unique node id. <4> The version of the store copy -<5> The status of the store copy, whether it is used as a +<5> The allocation id of the store copy +<6> The status of the store copy, whether it is used as a primary, replica or not used at all -<6> Any exception encountered while opening the shard index or +<7> Any exception encountered while opening the shard index or from earlier engine failure diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 6588f22a85a..0179e289b99 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -14,6 +14,7 @@ your application to Elasticsearch 3.0. * <> * <> * <> +* <> [[breaking_30_search_changes]] === Search changes @@ -515,3 +516,23 @@ from `OsStats.Cpu#getPercent`. Only stored fields are retrievable with this option. The fields option won't be able to load non stored fields from _source anymore. +[[breaking_30_allocation]] +=== Primary shard allocation +​ +Previously, primary shards were only assigned if a quorum of shard copies were found (configurable using +`index.recovery.initial_shards`, now deprecated). In the case where a primary had only a single replica, the quorum was defined +to be a single shard. This meant that any shard copy of an index with replication factor 1 could become primary, even if it +was a stale copy of the data on disk. This is now fixed by using allocation IDs. + +Allocation IDs assign unique identifiers to shard copies. This allows the cluster to differentiate between multiple +copies of the same data and track which shards have been active, so that after a cluster restart, shard copies +containing only the most recent data can become primaries. + +==== `index.shared_filesystem.recover_on_any_node` changes + +The behavior of `index.shared_filesystem.recover_on_any_node = true` has been changed.
Previously, in the case where no +shard copies could be found, an arbitrary node was chosen by potentially ignoring allocation deciders. Now, we take +balancing into account but don't assign the shard if the allocation deciders are not satisfied. The behavior has also changed +in the case where shard copies can be found. Previously, a node not holding the shard copy was chosen if none of the nodes +holding shard copies were satisfying the allocation deciders. Now, the shard will be assigned to a node having a shard copy, +even if none of the nodes holding a shard copy satisfy the allocation deciders. diff --git a/docs/reference/modules/cluster/shards_allocation.asciidoc b/docs/reference/modules/cluster/shards_allocation.asciidoc index 1daf131106d..b8073927a0f 100644 --- a/docs/reference/modules/cluster/shards_allocation.asciidoc +++ b/docs/reference/modules/cluster/shards_allocation.asciidoc @@ -22,9 +22,8 @@ Enable or disable allocation for specific kinds of shards: This setting does not affect the recovery of local primary shards when restarting a node. A restarted node that has a copy of an unassigned primary -shard will recover that primary immediately, assuming that the -<> setting is -satisfied. +shard will recover that primary immediately, assuming that its allocation id matches +one of the active allocation ids in the cluster state. -- diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index e82823ae997..e6a25a3956a 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -230,7 +231,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { boolean changed = false; while (unassignedIterator.hasNext()) { ShardRouting shard = unassignedIterator.next(); - if (shard.primary() || shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex()); + if (shard.primary() || shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } changed |= replicaShardAllocator.ignoreUnassignedIfDelayed(unassignedIterator, shard); From f19056ec209538ecdd23b382c2f3f41d9b588d3a Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 17 Dec 2015 16:25:36 +0100 Subject: [PATCH 096/322] Fix documentation for running REST tests using Gradle --- TESTING.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 569c16b0747..fef23d0cd3d 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -290,14 +290,14 @@ The REST tests are run automatically when executing the "gradle check" command. 
REST tests use the following command: --------------------------------------------------------------------------- -gradle :distribution:tar:integTest \ +gradle :distribution:integ-test-zip:integTest \ -Dtests.class=org.elasticsearch.test.rest.RestIT --------------------------------------------------------------------------- A specific test case can be run with --------------------------------------------------------------------------- -gradle :distribution:tar:integTest \ +gradle :distribution:integ-test-zip:integTest \ -Dtests.class=org.elasticsearch.test.rest.RestIT \ -Dtests.method="test {p0=cat.shards/10_basic/Help}" --------------------------------------------------------------------------- From 91eed30a143bd5f40c72f5fef27ce46311d0c30e Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 17 Dec 2015 15:27:41 +0100 Subject: [PATCH 097/322] BulkProcessor backs off exponentially by default With this commit we change the default behavior of BulkProcessor from not backing off when getting EsRejectedExecutionException to backing off exponentially. --- .../java/org/elasticsearch/action/bulk/BulkProcessor.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 316ec7a548e..af5af80ac2f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -78,7 +78,7 @@ public class BulkProcessor implements Closeable { private int bulkActions = 1000; private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB); private TimeValue flushInterval = null; - private BackoffPolicy backoffPolicy = BackoffPolicy.noBackoff(); + private BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff(); /** * Creates a builder of bulk processor with the client to use and the listener that will be used @@ -140,7 +140,9 @@ public class BulkProcessor implements Closeable { * Sets a custom backoff policy. The backoff policy defines how the bulk processor should handle retries of bulk requests internally * in case they have failed due to resource constraints (i.e. a thread pool was full). * - * The default is to not back off, i.e. failing immediately. + * The default is to back off exponentially. 
+ * + * @see org.elasticsearch.action.bulk.BackoffPolicy#exponentialBackoff() */ public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) { if (backoffPolicy == null) { @@ -162,7 +164,7 @@ public class BulkProcessor implements Closeable { if (client == null) { throw new NullPointerException("The client you specified while building a BulkProcessor is null"); } - + return new Builder(client, listener); } From 9dfc9397d464d1334a9c4569d904908fcbcab150 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 17 Dec 2015 16:59:05 +0100 Subject: [PATCH 098/322] Temporarily deactivate spuriously failing test --- core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 5728f7b54bc..636cea5c8aa 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -107,6 +107,7 @@ public class RetryTests extends ESTestCase { listener.assertOnFailureNeverCalled(); } + @AwaitsFix(bugUrl = "spuriously fails on Jenkins. Investigation ongoing.") public void testAsyncRetryFailsAfterBacksOff() throws Exception { BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); AssertingListener listener = new AssertingListener(); From 517fc113fe4e4c96e9a96d16f8a5b8800c2f727b Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 17 Dec 2015 17:01:29 +0100 Subject: [PATCH 099/322] Fix spans extraction to not also include individual terms. This is a bug that I introduced in #13239 while thinking that the differences were due to changes in Lucene: extractUnknownQuery is also called when span extraction already succeeded, so we should only fall back to Weight.extractTerms if no spans have been extracted yet. 
Close #15291 --- .../search/highlight/CustomQueryScorer.java | 2 +- .../search/highlight/HighlighterSearchIT.java | 6 +-- .../highlight/PlainHighlighterTests.java | 42 +++++++++++++++++++ 3 files changed, 46 insertions(+), 4 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java index 9e86edef47d..8ad24b5cb19 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java @@ -82,7 +82,7 @@ public final class CustomQueryScorer extends QueryScorer { } else if (query instanceof FiltersFunctionScoreQuery) { query = ((FiltersFunctionScoreQuery) query).getSubQuery(); extract(query, query.getBoost(), terms); - } else { + } else if (terms.isEmpty()) { extractWeightedTerms(terms, query, query.getBoost()); } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 63378baa721..4063ec81a28 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -1557,7 +1557,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .fragmenter("simple"))).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -1566,7 +1566,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .fragmenter("span"))).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); assertFailures(client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -2062,7 +2062,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); } public void testPostingsHighlighterMultipleFields() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java b/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java new file mode 100644 index 00000000000..5156209d6f1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.highlight; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.highlight.QueryScorer; +import org.apache.lucene.util.LuceneTestCase; + +public class PlainHighlighterTests extends LuceneTestCase { + + public void testHighlightPhrase() throws Exception { + Query query = new PhraseQuery.Builder() + .add(new Term("field", "foo")) + .add(new Term("field", "bar")) + .build(); + QueryScorer queryScorer = new CustomQueryScorer(query); + org.apache.lucene.search.highlight.Highlighter highlighter = new org.apache.lucene.search.highlight.Highlighter(queryScorer); + String[] frags = highlighter.getBestFragments(new MockAnalyzer(random()), "field", "bar foo bar foo", 10); + assertArrayEquals(new String[] {"bar foo bar foo"}, frags); + } + +} From 6ea16671f43617536ebb0d03468fe74bbc7bab37 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 17 Dec 2015 14:54:57 +0100 Subject: [PATCH 100/322] Simplify the Text API. We have the Text API, which is essentially a wrapper around a String and a BytesReference and then we have 3 implementations depending on whether the String view should be cached, the BytesReference view should be cached, or both should be cached. This commit merges everything into a single Text that is essentially the old StringAndBytesText impl. Long term we should look into whether this API has any performance benefit or if we could just use plain strings. This would greatly simplify all our other APIs that currently use Text. 
--- .../TransportShardMultiPercolateAction.java | 3 - .../service/InternalClusterService.java | 4 +- .../common/io/stream/StreamInput.java | 6 +- .../elasticsearch/common/text/BytesText.java | 82 ------------- .../common/text/StringAndBytesText.java | 111 ------------------ .../elasticsearch/common/text/StringText.java | 94 --------------- .../org/elasticsearch/common/text/Text.java | 96 ++++++++++++--- .../index/mapper/DocumentMapper.java | 6 +- .../percolator/PercolateContext.java | 4 +- .../percolator/PercolatorService.java | 14 +-- .../search/SearchShardTarget.java | 6 +- .../bucket/histogram/InternalHistogram.java | 3 +- .../search/fetch/FetchPhase.java | 4 +- .../highlight/FastVectorHighlighter.java | 6 +- .../search/highlight/HighlightField.java | 3 +- .../search/highlight/PlainHighlighter.java | 6 +- .../search/highlight/PostingsHighlighter.java | 4 +- .../search/internal/InternalSearchHit.java | 10 +- .../completion/CompletionSuggester.java | 6 +- .../suggest/phrase/PhraseSuggester.java | 7 +- .../search/suggest/term/TermSuggester.java | 6 +- .../search/highlight/CustomHighlighter.java | 5 +- .../internal/InternalSearchHitTests.java | 14 +-- .../search/suggest/CustomSuggester.java | 6 +- .../messy/tests/SimpleSortTests.java | 10 +- .../TransportDeleteByQueryActionTests.java | 4 +- 26 files changed, 140 insertions(+), 380 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/text/BytesText.java delete mode 100644 core/src/main/java/org/elasticsearch/common/text/StringAndBytesText.java delete mode 100644 core/src/main/java/org/elasticsearch/common/text/StringText.java diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java index 68bcdc1503d..1d29e6c3971 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.percolate; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; @@ -37,8 +36,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; -import org.elasticsearch.common.text.Text; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index b8c898a31e9..020e8d8da2d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -50,7 +50,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; 
import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -357,7 +357,7 @@ public class InternalClusterService extends AbstractLifecycleComponent { +public final class Text implements Comparable { + + public static final Text[] EMPTY_ARRAY = new Text[0]; + + public static Text[] convertFromStringArray(String[] strings) { + if (strings.length == 0) { + return EMPTY_ARRAY; + } + Text[] texts = new Text[strings.length]; + for (int i = 0; i < strings.length; i++) { + texts[i] = new Text(strings[i]); + } + return texts; + } + + private BytesReference bytes; + private String text; + private int hash; + + public Text(BytesReference bytes) { + this.bytes = bytes; + } + + public Text(String text) { + this.text = text; + } /** - * Are bytes available without the need to be converted into bytes when calling {@link #bytes()}. + * Whether a {@link BytesReference} view of the data is already materialized. */ - boolean hasBytes(); + public boolean hasBytes() { + return bytes != null; + } /** - * The UTF8 bytes representing the the text, might be converted on the fly, see {@link #hasBytes()} + * Returns a {@link BytesReference} view of the data. */ - BytesReference bytes(); + public BytesReference bytes() { + if (bytes == null) { + bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8)); + } + return bytes; + } /** - * Is there a {@link String} representation of the text. If not, then it {@link #hasBytes()}. + * Whether a {@link String} view of the data is already materialized. */ - boolean hasString(); + public boolean hasString() { + return text != null; + } /** - * Returns the string representation of the text, might be converted to a string on the fly. + * Returns a {@link String} view of the data. */ - String string(); + public String string() { + if (text == null) { + if (!bytes.hasArray()) { + bytes = bytes.toBytesArray(); + } + text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8); + } + return text; + } - /** - * Returns the string representation of the text, might be converted to a string on the fly. 
- */ @Override - String toString(); + public String toString() { + return string(); + } + + @Override + public int hashCode() { + if (hash == 0) { + hash = bytes().hashCode(); + } + return hash; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + return bytes().equals(((Text) obj).bytes()); + } + + @Override + public int compareTo(Text text) { + return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 24374806717..333cda459f7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; @@ -114,7 +114,7 @@ public class DocumentMapper implements ToXContent { private final MapperService mapperService; private final String type; - private final StringAndBytesText typeText; + private final Text typeText; private volatile CompressedXContent mappingSource; @@ -138,7 +138,7 @@ public class DocumentMapper implements ToXContent { ReentrantReadWriteLock mappingLock) { this.mapperService = mapperService; this.type = rootObjectMapper.name(); - this.typeText = new StringAndBytesText(this.type); + this.typeText = new Text(this.type); this.mapping = new Mapping( Version.indexCreated(indexSettings), rootObjectMapper, diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 70abaaaff3d..5b09b55f8dc 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.HasHeaders; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; @@ -164,7 +164,7 @@ public class PercolateContext extends SearchContext { fields.put(field.name(), new InternalSearchHitField(field.name(), Collections.emptyList())); } hitContext().reset( - new InternalSearchHit(0, "unknown", new StringText(parsedDocument.type()), fields), + new InternalSearchHit(0, "unknown", new Text(parsedDocument.type()), fields), atomicReaderContext, 0, docSearcher.searcher() ); } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index fa7b47766a8..eb33f3832b4 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -52,8 +52,6 @@ import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.BytesText; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -533,10 +531,10 @@ public class PercolatorService extends AbstractComponent { List finalMatches = new ArrayList<>(requestedSize == 0 ? numMatches : requestedSize); outer: for (PercolateShardResponse response : shardResults) { - Text index = new StringText(response.getIndex()); + Text index = new Text(response.getIndex()); for (int i = 0; i < response.matches().length; i++) { float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i]; - Text match = new BytesText(new BytesArray(response.matches()[i])); + Text match = new Text(new BytesArray(response.matches()[i])); Map hl = response.hls().isEmpty() ? null : response.hls().get(i); finalMatches.add(new PercolateResponse.Match(index, match, score, hl)); if (requestedSize != 0 && finalMatches.size() == requestedSize) { @@ -686,10 +684,10 @@ public class PercolatorService extends AbstractComponent { List finalMatches = new ArrayList<>(requestedSize); if (nonEmptyResponses == 1) { PercolateShardResponse response = shardResults.get(firstNonEmptyIndex); - Text index = new StringText(response.getIndex()); + Text index = new Text(response.getIndex()); for (int i = 0; i < response.matches().length; i++) { float score = response.scores().length == 0 ? Float.NaN : response.scores()[i]; - Text match = new BytesText(new BytesArray(response.matches()[i])); + Text match = new Text(new BytesArray(response.matches()[i])); if (!response.hls().isEmpty()) { Map hl = response.hls().get(i); finalMatches.add(new PercolateResponse.Match(index, match, score, hl)); @@ -728,8 +726,8 @@ public class PercolatorService extends AbstractComponent { slots[requestIndex]++; PercolateShardResponse shardResponse = shardResults.get(requestIndex); - Text index = new StringText(shardResponse.getIndex()); - Text match = new BytesText(new BytesArray(shardResponse.matches()[itemIndex])); + Text index = new Text(shardResponse.getIndex()); + Text match = new Text(new BytesArray(shardResponse.matches()[itemIndex])); float score = shardResponse.scores()[itemIndex]; if (!shardResponse.hls().isEmpty()) { Map hl = shardResponse.hls().get(itemIndex); diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 1a12751d396..c648436c3a9 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text; import java.io.IOException; @@ -42,8 +42,8 @@ public class SearchShardTarget implements Streamable, Comparable extends Inter @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (formatter != ValueFormatter.RAW) { - Text keyTxt = new 
StringText(formatter.format(key)); + Text keyTxt = new Text(formatter.format(key)); if (keyed) { builder.startObject(keyTxt.string()); } else { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 227141e4ddf..04890700be8 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -198,7 +198,7 @@ public class FetchPhase implements SearchPhase { DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { - typeText = new StringAndBytesText(fieldsVisitor.uid().type()); + typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index 65702dd24b5..b57899b2e17 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.vectorhighlight.SimpleFieldFragList; import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder; import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -159,7 +159,7 @@ public class FastVectorHighlighter implements Highlighter { } if (fragments != null && fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize(); @@ -170,7 +170,7 @@ public class FastVectorHighlighter implements Highlighter { fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java index 9077278d515..30530b697f3 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java +++ 
b/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.highlight; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import java.io.IOException; @@ -90,7 +89,7 @@ public class HighlightField implements Streamable { if (in.readBoolean()) { int size = in.readVInt(); if (size == 0) { - fragments = StringText.EMPTY_ARRAY; + fragments = Text.EMPTY_ARRAY; } else { fragments = new Text[size]; for (int i = 0; i < size; i++) { diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 041ed754d76..5f4cdddb060 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -33,9 +33,7 @@ import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.TextFragment; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; @@ -158,7 +156,7 @@ public class PlainHighlighter implements Highlighter { } if (fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize(); @@ -172,7 +170,7 @@ public class PlainHighlighter implements Highlighter { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } if (end > 0) { - return new HighlightField(highlighterContext.fieldName, new Text[] { new StringText(fieldContents.substring(0, end)) }); + return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); } } return null; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index e11840e89e7..2509f95da59 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -28,7 +28,7 @@ import org.apache.lucene.search.postingshighlight.CustomSeparatorBreakIterator; import org.apache.lucene.search.postingshighlight.Snippet; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -122,7 +122,7 @@ public class PostingsHighlighter implements Highlighter { } if (fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + 
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } return null; diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index 96fd103fa6f..fcac5b1cc8b 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -104,14 +104,14 @@ public class InternalSearchHit implements SearchHit { public InternalSearchHit(int docId, String id, Text type, Map fields) { this.docId = docId; - this.id = new StringAndBytesText(id); + this.id = new Text(id); this.type = type; this.fields = fields; } public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map fields) { this.docId = nestedTopDocId; - this.id = new StringAndBytesText(id); + this.id = new Text(id); this.type = type; this.nestedIdentity = nestedIdentity; this.fields = fields; @@ -339,7 +339,7 @@ public class InternalSearchHit implements SearchHit { if (sortValues != null) { for (int i = 0; i < sortValues.length; i++) { if (sortValues[i] instanceof BytesRef) { - sortValuesCopy[i] = new StringAndBytesText(new BytesArray((BytesRef) sortValues[i])); + sortValuesCopy[i] = new Text(new BytesArray((BytesRef) sortValues[i])); } } } @@ -783,7 +783,7 @@ public class InternalSearchHit implements SearchHit { private InternalNestedIdentity child; public InternalNestedIdentity(String field, int offset, InternalNestedIdentity child) { - this.field = new StringAndBytesText(field); + this.field = new Text(field); this.offset = offset; this.child = child; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 106672ae7ae..6a0155ffb7a 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -30,7 +30,7 @@ import org.apache.lucene.search.suggest.document.TopSuggestDocs; import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector; import org.apache.lucene.util.*; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.MappedFieldType; @@ -57,7 +57,7 @@ public class CompletionSuggester extends Suggester } CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize()); spare.copyUTF8Bytes(suggestionContext.getText()); - CompletionSuggestion.Entry completionSuggestEntry = new CompletionSuggestion.Entry(new StringText(spare.toString()), 0, spare.length()); + CompletionSuggestion.Entry 
completionSuggestEntry = new CompletionSuggestion.Entry(new Text(spare.toString()), 0, spare.length()); completionSuggestion.addTerm(completionSuggestEntry); TopSuggestDocsCollector collector = new TopDocumentsCollector(suggestionContext.getSize()); suggest(searcher, suggestionContext.toQuery(), collector); @@ -91,7 +91,7 @@ public class CompletionSuggester extends Suggester } if (numResult++ < suggestionContext.getSize()) { CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( - new StringText(suggestDoc.key.toString()), suggestDoc.score, contexts, payload); + new Text(suggestDoc.key.toString()), suggestDoc.score, contexts, payload); completionSuggestEntry.addOption(option); } else { break; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index fccf9ebc30e..c7fa6fae302 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -30,7 +30,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.ParsedQuery; @@ -127,11 +126,11 @@ public final class PhraseSuggester extends Suggester { if (!collateMatch && !collatePrune) { continue; } - Text phrase = new StringText(spare.toString()); + Text phrase = new Text(spare.toString()); Text highlighted = null; if (suggestion.getPreTag() != null) { spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, suggestion.getPreTag(), suggestion.getPostTag())); - highlighted = new StringText(spare.toString()); + highlighted = new Text(spare.toString()); } if (collatePrune) { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); @@ -147,7 +146,7 @@ public final class PhraseSuggester extends Suggester { private PhraseSuggestion.Entry buildResultEntry(PhraseSuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { spare.copyUTF8Bytes(suggestion.getText()); - return new PhraseSuggestion.Entry(new StringText(spare.toString()), 0, spare.length(), cutoffScore); + return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } ScriptService scriptService() { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index 4c1b176c990..34cd3ad4d56 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -27,8 +27,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.text.BytesText; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; @@ -54,10 +52,10 @@ public final class TermSuggester extends Suggester { SuggestWord[] suggestedWords = 
directSpellChecker.suggestSimilar( token.term, suggestion.getShardSize(), indexReader, suggestion.getDirectSpellCheckerSettings().suggestMode() ); - Text key = new BytesText(new BytesArray(token.term.bytes())); + Text key = new Text(new BytesArray(token.term.bytes())); TermSuggestion.Entry resultEntry = new TermSuggestion.Entry(key, token.startOffset, token.endOffset - token.startOffset); for (SuggestWord suggestWord : suggestedWords) { - Text word = new StringText(suggestWord.string); + Text word = new Text(suggestWord.string); resultEntry.addOption(new TermSuggestion.Entry.Option(word, suggestWord.freq, suggestWord.score)); } response.addTerm(resultEntry); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java index 5a8d7c0150a..05b999a9196 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.highlight; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; @@ -52,12 +51,12 @@ public class CustomHighlighter implements Highlighter { } List responses = new ArrayList<>(); - responses.add(new StringText(String.format(Locale.ENGLISH, "standard response for %s at position %s", field.field(), + responses.add(new Text(String.format(Locale.ENGLISH, "standard response for %s at position %s", field.field(), cacheEntry.position))); if (field.fieldOptions().options() != null) { for (Map.Entry entry : field.fieldOptions().options().entrySet()) { - responses.add(new StringText("field:" + entry.getKey() + ":" + entry.getValue())); + responses.add(new Text("field:" + entry.getKey() + ":" + entry.getValue())); } } diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index cc631d5df2a..0525fd28db1 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.internal; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -39,25 +39,25 @@ public class InternalSearchHitTests extends ESTestCase { SearchShardTarget target = new SearchShardTarget("_node_id", "_index", 0); Map innerHits = new HashMap<>(); - InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHit1.shardTarget(target); - InternalSearchHit innerInnerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerInnerHit2 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerInnerHit2.shardTarget(target); innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerInnerHit2}, 1, 1f)); innerHit1.setInnerHits(innerHits); - InternalSearchHit innerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit2 = 
new InternalSearchHit(0, "_id", new Text("_type"), null); innerHit2.shardTarget(target); - InternalSearchHit innerHit3 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit3 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHit3.shardTarget(target); innerHits = new HashMap<>(); - InternalSearchHit hit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit hit1 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerHit1, innerHit2}, 1, 1f)); innerHits.put("2", new InternalSearchHits(new InternalSearchHit[]{innerHit3}, 1, 1f)); hit1.shardTarget(target); hit1.setInnerHits(innerHits); - InternalSearchHit hit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit hit2 = new InternalSearchHit(0, "_id", new Text("_type"), null); hit2.shardTarget(target); InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[]{hit1, hit2}, 2, 1f); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 281cf6ae18e..419316b5265 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import java.io.IOException; import java.util.Locale; @@ -42,11 +42,11 @@ public class CustomSuggester extends Suggester> response = new Suggest.Suggestion<>(name, suggestion.getSize()); String firstSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12"); - Suggest.Suggestion.Entry resultEntry12 = new Suggest.Suggestion.Entry<>(new StringText(firstSuggestion), 0, text.length() + 2); + Suggest.Suggestion.Entry resultEntry12 = new Suggest.Suggestion.Entry<>(new Text(firstSuggestion), 0, text.length() + 2); response.addTerm(resultEntry12); String secondSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123"); - Suggest.Suggestion.Entry resultEntry123 = new Suggest.Suggestion.Entry<>(new StringText(secondSuggestion), 0, text.length() + 3); + Suggest.Suggestion.Entry resultEntry123 = new Suggest.Suggestion.Entry<>(new Text(secondSuggestion), 0, text.length() + 3); response.addTerm(resultEntry123); return response; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index db8a13c5ab8..de9b5b5f4f5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.DistanceUnit; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -1633,8 +1633,7 @@ public class SimpleSortTests extends ESIntegTestCase { assertThat(hits[i].getSortValues().length, is(1)); Object o = hits[i].getSortValues()[0]; assertThat(o, notNullValue()); - assertThat(o instanceof StringAndBytesText, is(true)); - StringAndBytesText text = (StringAndBytesText) o; + Text text = (Text) o; assertThat(text.string(), is("bar")); } @@ -1650,8 +1649,7 @@ public class SimpleSortTests extends ESIntegTestCase { assertThat(hits[i].getSortValues().length, is(1)); Object o = hits[i].getSortValues()[0]; assertThat(o, notNullValue()); - assertThat(o instanceof StringAndBytesText, is(true)); - StringAndBytesText text = (StringAndBytesText) o; + Text text = (Text) o; assertThat(text.string(), is("bar bar")); } } @@ -1925,7 +1923,7 @@ public class SimpleSortTests extends ESIntegTestCase { .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("string")).get(); assertSortValues(resp, - new Object[] {new StringAndBytesText("bcd"), null}, + new Object[] {new Text("bcd"), null}, new Object[] {null, null}); resp = client().prepareSearch("test1", "test2") diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java index c44608c4e4b..57bfa4c2328 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.CountDown; @@ -339,7 +339,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { final int nbDocs = randomIntBetween(0, 20); SearchHit[] docs = new SearchHit[nbDocs]; for (int i = 0; i < nbDocs; i++) { - InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new StringText("type"), null); + InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new Text("type"), null); doc.shard(new SearchShardTarget("node", "test", randomInt())); docs[i] = doc; } From 87f800cdca235de255ffaefb7777888da59dc420 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 17 Dec 2015 17:26:11 +0100 Subject: [PATCH 101/322] make it updateable --- core/src/main/java/org/elasticsearch/transport/Transport.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index 8e4b7a47b1b..16270234494 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -36,7 +36,7 @@ import java.util.Map; public interface Transport extends LifecycleComponent { - Setting TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", false, Setting.Scope.CLUSTER); + Setting TRANSPORT_PROFILES_SETTING = 
Setting.groupSetting("transport.profiles.", true, Setting.Scope.CLUSTER); public static class TransportSettings { public static final String TRANSPORT_TCP_COMPRESS = "transport.tcp.compress"; From 51fc09b2642afae23ef76569e022e5ade2b99e1d Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Thu, 17 Dec 2015 17:57:55 +0100 Subject: [PATCH 102/322] Temporarily deactivate spuriously failing test --- core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 636cea5c8aa..5b9ab898b0e 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -91,6 +91,7 @@ public class RetryTests extends ESTestCase { assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); } + @AwaitsFix(bugUrl = "spuriously fails on Jenkins. Investigation ongoing.") public void testAsyncRetryBacksOff() throws Exception { BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); AssertingListener listener = new AssertingListener(); From bf7a89e304b5b2afc38aeccb2835ab6850084dd1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 17 Dec 2015 12:18:39 -0500 Subject: [PATCH 103/322] Remove optionality from streaming useCompressedOops field --- core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index 22f0ed912d0..82a99bd0bd1 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -375,7 +375,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.readFrom(in); gcCollectors = in.readStringArray(); memoryPools = in.readStringArray(); - useCompressedOops = in.readOptionalString(); + useCompressedOops = in.readString(); } @Override @@ -400,7 +400,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.writeTo(out); out.writeStringArray(gcCollectors); out.writeStringArray(memoryPools); - out.writeOptionalString(useCompressedOops); + out.writeString(useCompressedOops); } public static class Mem implements Streamable { From 315b0c263d1988905a5b989d79abb01515e1fc5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Nov 2015 23:46:07 +0100 Subject: [PATCH 104/322] Geo: Make rest of ShapeBuilders implement Writable This is the second part of making ShapeBuilders implement Writable. This PR add serialization, equality and hashCode to all remaining ShapeBuilders that don't implement it yet. 
--- .../common/geo/builders/CircleBuilder.java | 2 - .../common/geo/builders/EnvelopeBuilder.java | 1 - .../geo/builders/MultiPointBuilder.java | 43 +++++++++++++- .../common/geo/builders/PointBuilder.java | 2 - .../AbstractShapeBuilderTestCase.java | 7 ++- .../geo/builders/MultiPointBuilderTests.java | 56 +++++++++++++++++++ 6 files changed, 100 insertions(+), 11 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index 5f11d12a4bf..cdfc89a7f94 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -36,8 +36,6 @@ public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; - public static final CircleBuilder PROTOTYPE = new CircleBuilder(); - private DistanceUnit unit; private double radius; private Coordinate center; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 62f29d2bad7..770bc4ffdef 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -33,7 +33,6 @@ import java.util.Objects; public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; - public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); protected Coordinate topLeft; protected Coordinate bottomRight; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 8d5cfabdabb..517cfa05048 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -22,16 +22,18 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Point; import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; -import org.elasticsearch.common.geo.XShapeCollection; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; public class MultiPointBuilder extends PointCollection { - public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; @Override @@ -61,4 +63,39 @@ public class MultiPointBuilder extends PointCollection { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(points); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiPointBuilder other = (MultiPointBuilder) obj; + return Objects.equals(points, other.points); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
out.writeVInt(points.size()); + for (Coordinate point : points) { + writeCoordinateTo(point, out); + } + } + + @Override + public ShapeBuilder readFrom(StreamInput in) throws IOException { + MultiPointBuilder multiPointBuilder = new MultiPointBuilder(); + int size = in.readVInt(); + for (int i=0; i < size; i++) { + multiPointBuilder.point(readCoordinateFrom(in)); + } + return multiPointBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index d6d62c28b8c..e7070333666 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -33,8 +33,6 @@ public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - public static final PointBuilder PROTOTYPE = new PointBuilder(); - private Coordinate coordinate; public PointBuilder coordinate(Coordinate coordinate) { diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index f15a731e86e..ce1663fad79 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -44,9 +44,10 @@ public abstract class AbstractShapeBuilderTestCase exte public static void init() { if (namedWriteableRegistry == null) { namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new PointBuilder()); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new CircleBuilder()); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new EnvelopeBuilder()); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new MultiPointBuilder()); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java new file mode 100644 index 00000000000..49c256a4362 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected MultiPointBuilder createTestShapeBuilder() { + return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT); + } + + @Override + protected MultiPointBuilder mutate(MultiPointBuilder original) throws IOException { + MultiPointBuilder mutation = copyShape(original); + Coordinate[] coordinates = original.coordinates(false); + Coordinate coordinate = randomFrom(coordinates); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + return mutation.points(coordinates); + } +} From 31f90c91af6e084391c392d0eef81118015861ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Nov 2015 13:48:50 +0100 Subject: [PATCH 105/322] Making LineStringBuilder and MultiLineStringBuilder writable and add equals/hashCode --- .../common/geo/builders/CircleBuilder.java | 2 + .../common/geo/builders/EnvelopeBuilder.java | 2 + .../geo/builders/LineStringBuilder.java | 44 +++++++++++++ .../geo/builders/MultiLineStringBuilder.java | 44 +++++++++++++ .../geo/builders/MultiPointBuilder.java | 4 +- .../common/geo/builders/PointBuilder.java | 1 + .../AbstractShapeBuilderTestCase.java | 16 ++--- .../geo/builders/LineStringBuilderTests.java | 57 +++++++++++++++++ .../builders/MultiLineStringBuilderTests.java | 62 +++++++++++++++++++ 9 files changed, 224 insertions(+), 8 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index cdfc89a7f94..ddafecc4e7f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -36,6 +36,8 @@ public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; + static final CircleBuilder PROTOTYPE = new CircleBuilder(); + private DistanceUnit unit; private double radius; private Coordinate center; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 770bc4ffdef..62eea070032 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -34,6 +34,8 @@ public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); + protected Coordinate topLeft; protected Coordinate bottomRight; diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index c7ba9b72f55..71d9bee8686 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -22,8 +22,12 @@ package org.elasticsearch.common.geo.builders; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Objects; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; + import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; @@ -34,6 +38,8 @@ public class LineStringBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; + static final LineStringBuilder PROTOTYPE = new LineStringBuilder(); + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -139,4 +145,42 @@ public class LineStringBuilder extends PointCollection { } return coordinates; } + + @Override + public int hashCode() { + return Objects.hash(points, translated); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + LineStringBuilder other = (LineStringBuilder) obj; + return Objects.equals(points, other.points) && + (translated == other.translated); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(points.size()); + for (Coordinate point : points) { + writeCoordinateTo(point, out); + } + out.writeBoolean(translated); + } + + @Override + public LineStringBuilder readFrom(StreamInput in) throws IOException { + LineStringBuilder lineStringBuilder = new LineStringBuilder(); + int size = in.readVInt(); + for (int i=0; i < size; i++) { + lineStringBuilder.point(readCoordinateFrom(in)); + } + lineStringBuilder.translated = in.readBoolean(); + return lineStringBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index a004b90a2dc..16b36d81037 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import com.spatial4j.core.shape.Shape; @@ -29,11 +31,14 @@ import com.vividsolutions.jts.geom.LineString; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; +import java.util.Objects; public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; + static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); + private final ArrayList lines = new ArrayList<>(); public MultiLineStringBuilder linestring(LineStringBuilder line) { @@ -41,6 +46,10 @@ public class MultiLineStringBuilder extends ShapeBuilder { return this; } + public 
MultiLineStringBuilder linestring(Coordinate[] coordinates) { + return this.linestring(new LineStringBuilder().points(coordinates)); + } + public Coordinate[][] coordinates() { Coordinate[][] result = new Coordinate[lines.size()][]; for (int i = 0; i < result.length; i++) { @@ -92,4 +101,39 @@ public class MultiLineStringBuilder extends ShapeBuilder { } return jtsGeometry(geometry); } + + @Override + public int hashCode() { + return Objects.hash(lines); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiLineStringBuilder other = (MultiLineStringBuilder) obj; + return Objects.equals(lines, other.lines); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(lines.size()); + for (LineStringBuilder line : lines) { + line.writeTo(out); + } + } + + @Override + public MultiLineStringBuilder readFrom(StreamInput in) throws IOException { + MultiLineStringBuilder multiLineStringBuilder = new MultiLineStringBuilder(); + int size = in.readVInt(); + for (int i=0; i < size; i++) { + multiLineStringBuilder.linestring(LineStringBuilder.PROTOTYPE.readFrom(in)); + } + return multiLineStringBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 517cfa05048..4f2ad8cbc57 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -36,6 +36,8 @@ public class MultiPointBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; + final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(); + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -90,7 +92,7 @@ public class MultiPointBuilder extends PointCollection { } @Override - public ShapeBuilder readFrom(StreamInput in) throws IOException { + public MultiPointBuilder readFrom(StreamInput in) throws IOException { MultiPointBuilder multiPointBuilder = new MultiPointBuilder(); int size = in.readVInt(); for (int i=0; i < size; i++) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index e7070333666..22c293f4939 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -32,6 +32,7 @@ import com.vividsolutions.jts.geom.Coordinate; public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; + static final PointBuilder PROTOTYPE = new PointBuilder(); private Coordinate coordinate; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index ce1663fad79..3889e00c4ea 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -44,10 +44,12 @@ public abstract class AbstractShapeBuilderTestCase exte public static void init() { if (namedWriteableRegistry == null) { 
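            // Sketch of the registry mechanism these patches rely on:
            // registerPrototype maps each builder's writeable name to a shared
            // prototype instance, and deserialization later reads that name from
            // the stream and calls prototype.readFrom(in) to produce a fresh builder.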
namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new PointBuilder()); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new CircleBuilder()); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new EnvelopeBuilder()); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, new MultiPointBuilder()); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); } } @@ -94,9 +96,9 @@ public abstract class AbstractShapeBuilderTestCase exte for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB testShape = createTestShapeBuilder(); SB deserializedShape = copyShape(testShape); - assertEquals(deserializedShape, testShape); - assertEquals(deserializedShape.hashCode(), testShape.hashCode()); - assertNotSame(deserializedShape, testShape); + assertEquals(testShape, deserializedShape); + assertEquals(testShape.hashCode(), deserializedShape.hashCode()); + assertNotSame(testShape, deserializedShape); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java new file mode 100644 index 00000000000..e4e483c0fcb --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.common.geo.builders;
+
+import com.vividsolutions.jts.geom.Coordinate;
+
+import org.elasticsearch.test.geo.RandomShapeGenerator;
+import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
+
+import java.io.IOException;
+
+public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStringBuilder> {
+
+    @Override
+    protected LineStringBuilder createTestShapeBuilder() {
+        LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING);
+        return lsb;
+    }
+
+    @Override
+    protected LineStringBuilder mutate(LineStringBuilder original) throws IOException {
+        LineStringBuilder mutation = copyShape(original);
+        Coordinate[] coordinates = original.coordinates(false);
+        Coordinate coordinate = randomFrom(coordinates);
+        if (randomBoolean()) {
+            if (coordinate.x != 0.0) {
+                coordinate.x = coordinate.x / 2;
+            } else {
+                coordinate.x = randomDoubleBetween(-180.0, 180.0, true);
+            }
+        } else {
+            if (coordinate.y != 0.0) {
+                coordinate.y = coordinate.y / 2;
+            } else {
+                coordinate.y = randomDoubleBetween(-90.0, 90.0, true);
+            }
+        }
+        return mutation.points(coordinates);
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java
new file mode 100644
index 00000000000..07698683e87
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected MultiLineStringBuilder createTestShapeBuilder() { + return (MultiLineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTILINESTRING); + } + + @Override + protected MultiLineStringBuilder mutate(MultiLineStringBuilder original) throws IOException { + MultiLineStringBuilder mutation = copyShape(original); + Coordinate[][] coordinates = mutation.coordinates(); + int lineToChange = randomInt(coordinates.length - 1); + for (int i = 0; i < coordinates.length; i++) { + Coordinate[] line = coordinates[i]; + if (i == lineToChange) { + Coordinate coordinate = randomFrom(line); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + } + } + return mutation; + } +} From ae7e8bbaf0138f287430be741fbd8e013d8fb392 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Nov 2015 17:08:32 +0100 Subject: [PATCH 106/322] Making PolygonBuilder writable and add equals/hashCode --- .../common/geo/builders/PolygonBuilder.java | 186 +++++------------- .../AbstractShapeBuilderTestCase.java | 3 +- .../geo/builders/LineStringBuilderTests.java | 3 + .../geo/builders/PolygonBuilderTests.java | 71 +++++++ 4 files changed, 127 insertions(+), 136 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 04540df27e9..13b068b2e5b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -29,6 +29,8 @@ import com.vividsolutions.jts.geom.MultiPolygon; import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,6 +41,8 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.List; +import java.util.Objects; /** * The {@link PolygonBuilder} implements the groundwork to create polygons. 
This contains @@ -48,6 +52,9 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; + static final PolygonBuilder PROTOTYPE = new PolygonBuilder(); + + private static final Coordinate[][] EMPTY = new Coordinate[0][]; // line string defining the shell of the polygon private LineStringBuilder shell; @@ -103,6 +110,20 @@ public class PolygonBuilder extends ShapeBuilder { return this; } + /** + * @return the list of holes defined for this polygon + */ + public List holes() { + return this.holes; + } + + /** + * @return the list of points of the shell for this polygon + */ + public LineStringBuilder shell() { + return this.shell; + } + /** * Close the shell of the polygon */ @@ -357,8 +378,6 @@ public class PolygonBuilder extends ShapeBuilder { return result; } - private static final Coordinate[][] EMPTY = new Coordinate[0][]; - private static Coordinate[][] holes(Edge[] holes, int numHoles) { if (numHoles == 0) { return EMPTY; @@ -520,147 +539,44 @@ public class PolygonBuilder extends ShapeBuilder { return points.length-1; } - /** - * Create a connected list of a list of coordinates - * - * @param points - * array of point - * @param offset - * index of the first point - * @param length - * number of points - * @return Array of edges - */ - private static Edge[] ring(int component, boolean direction, boolean handedness, LineStringBuilder shell, - Coordinate[] points, int offset, Edge[] edges, int toffset, int length, final AtomicBoolean translated) { - // calculate the direction of the points: - // find the point a the top of the set and check its - // neighbors orientation. So direction is equivalent - // to clockwise/counterclockwise - final int top = top(points, offset, length); - final int prev = (offset + ((top + length - 1) % length)); - final int next = (offset + ((top + 1) % length)); - boolean orientation = points[offset + prev].x > points[offset + next].x; - - // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) - // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards - // thus if orientation is computed as cw, the logic will translate points across dateline - // and convert to a right handed system - - // compute the bounding box and calculate range - double[] range = range(points, offset, length); - final double rng = range[1] - range[0]; - // translate the points if the following is true - // 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres - // (translation would result in a collapsed poly) - // 2. 
the shell of the candidate hole has been translated (to preserve the coordinate system) - boolean incorrectOrientation = component == 0 && handedness != orientation; - if ( (incorrectOrientation && (rng > DATELINE && rng != 2*DATELINE)) || (translated.get() && component != 0)) { - translate(points); - // flip the translation bit if the shell is being translated - if (component == 0) { - translated.set(true); - } - // correct the orientation post translation (ccw for shell, cw for holes) - if (component == 0 || (component != 0 && handedness == orientation)) { - orientation = !orientation; - } - } - return concat(component, direction ^ orientation, points, offset, edges, toffset, length); + @Override + public int hashCode() { + return Objects.hash(shell, holes, orientation); } - private static final int top(Coordinate[] points, int offset, int length) { - int top = 0; // we start at 1 here since top points to 0 - for (int i = 1; i < length; i++) { - if (points[offset + i].y < points[offset + top].y) { - top = i; - } else if (points[offset + i].y == points[offset + top].y) { - if (points[offset + i].x < points[offset + top].x) { - top = i; - } - } + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; } - return top; + if (obj == null || getClass() != obj.getClass()) { + return false; + } + PolygonBuilder other = (PolygonBuilder) obj; + return Objects.equals(shell, other.shell) && + Objects.equals(holes, other.holes) && + Objects.equals(orientation, other.orientation); } - private static final double[] range(Coordinate[] points, int offset, int length) { - double minX = points[0].x; - double maxX = points[0].x; - double minY = points[0].y; - double maxY = points[0].y; - // compute the bounding coordinates (@todo: cleanup brute force) - for (int i = 1; i < length; ++i) { - if (points[offset + i].x < minX) { - minX = points[offset + i].x; - } - if (points[offset + i].x > maxX) { - maxX = points[offset + i].x; - } - if (points[offset + i].y < minY) { - minY = points[offset + i].y; - } - if (points[offset + i].y > maxY) { - maxY = points[offset + i].y; - } + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(orientation == Orientation.RIGHT); + shell.writeTo(out); + out.writeVInt(holes.size()); + for (LineStringBuilder hole : holes) { + hole.writeTo(out); } - return new double[] {minX, maxX, minY, maxY}; } - /** - * Concatenate a set of points to a polygon - * - * @param component - * component id of the polygon - * @param direction - * direction of the ring - * @param points - * list of points to concatenate - * @param pointOffset - * index of the first point - * @param edges - * Array of edges to write the result to - * @param edgeOffset - * index of the first edge in the result - * @param length - * number of points to use - * @return the edges creates - */ - private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset, Edge[] edges, final int edgeOffset, - int length) { - assert edges.length >= length+edgeOffset; - assert points.length >= length+pointOffset; - edges[edgeOffset] = new Edge(points[pointOffset], null); - for (int i = 1; i < length; i++) { - if (direction) { - edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]); - edges[edgeOffset + i].component = component; - } else if(!edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i])) { - edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], 
null); - edges[edgeOffset + i - 1].component = component; - } else { - throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + points[pointOffset + i]); - } - } - - if (direction) { - edges[edgeOffset].setNext(edges[edgeOffset + length - 1]); - edges[edgeOffset].component = component; - } else { - edges[edgeOffset + length - 1].setNext(edges[edgeOffset]); - edges[edgeOffset + length - 1].component = component; - } - - return edges; - } - - /** - * Transforms coordinates in the eastern hemisphere (-180:0) to a (180:360) range - */ - private static void translate(Coordinate[] points) { - for (Coordinate c : points) { - if (c.x < 0) { - c.x += 2*DATELINE; - } + @Override + public PolygonBuilder readFrom(StreamInput in) throws IOException { + Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + PolygonBuilder polyBuilder = new PolygonBuilder(orientation); + polyBuilder.shell = LineStringBuilder.PROTOTYPE.readFrom(in); + int holes = in.readVInt(); + for (int i = 0; i < holes; i++) { + polyBuilder.hole(LineStringBuilder.PROTOTYPE.readFrom(in)); } + return polyBuilder; } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 3889e00c4ea..7776c8dbb7a 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.*; public abstract class AbstractShapeBuilderTestCase extends ESTestCase { - private static final int NUMBER_OF_TESTBUILDERS = 20; + private static final int NUMBER_OF_TESTBUILDERS = 1; private static NamedWriteableRegistry namedWriteableRegistry; /** @@ -50,6 +50,7 @@ public abstract class AbstractShapeBuilderTestCase exte namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java index e4e483c0fcb..3a1f458cd87 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java @@ -31,6 +31,9 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected PolygonBuilder createTestShapeBuilder() { + PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); + // NORELEASE translated might have been changed by createShape, but won't survive xContent->Parse roundtrip + pgb.shell().translated = false; + return pgb; + } + + @Override + protected PolygonBuilder mutate(PolygonBuilder original) throws IOException { + PolygonBuilder mutation = copyShape(original); + if (randomBoolean()) { + // toggle orientation + mutation.orientation = (original.orientation == Orientation.LEFT ? 
Orientation.RIGHT : Orientation.LEFT); + } else { + // change either point in shell or in random hole + LineStringBuilder lineToChange; + if (randomBoolean() || mutation.holes().size() == 0) { + lineToChange = mutation.shell(); + } else { + lineToChange = randomFrom(mutation.holes()); + } + Coordinate coordinate = randomFrom(lineToChange.coordinates(false)); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + } + return mutation; + } +} From 1f5ee642fd2d1d72b75f608148df779072850b4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Nov 2015 18:35:35 +0100 Subject: [PATCH 107/322] Making MultiPolygonBuilder writable and adding equals/hashCode --- .../geo/builders/MultiPolygonBuilder.java | 53 +++++++++++++++++- .../common/geo/builders/PolygonBuilder.java | 2 + .../AbstractShapeBuilderTestCase.java | 3 +- .../builders/MultiPolygonBuilderTests.java | 56 +++++++++++++++++++ .../geo/builders/PolygonBuilderTests.java | 15 +++-- 5 files changed, 122 insertions(+), 7 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index e7762e51b61..e6525f770f7 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -22,8 +22,12 @@ package org.elasticsearch.common.geo.builders; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.Objects; import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import com.spatial4j.core.shape.Shape; @@ -32,8 +36,9 @@ import com.vividsolutions.jts.geom.Coordinate; public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; + static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); - protected final ArrayList polygons = new ArrayList<>(); + private final ArrayList polygons = new ArrayList<>(); public MultiPolygonBuilder() { this(Orientation.RIGHT); @@ -48,10 +53,18 @@ public class MultiPolygonBuilder extends ShapeBuilder { return this; } + /** + * get the list of polygons + */ + public ArrayList polygons() { + return polygons; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); for(PolygonBuilder polygon : polygons) { builder.startArray(); @@ -89,4 +102,42 @@ public class MultiPolygonBuilder extends ShapeBuilder { return new XShapeCollection<>(shapes, SPATIAL_CONTEXT); //note: ShapeCollection is probably faster than a Multi* geom. 
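        // (XShapeCollection simply wraps the per-polygon shapes already built
        //  above, so no JTS MultiPolygon geometry needs to be assembled here.)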
} + + @Override + public int hashCode() { + return Objects.hash(polygons, orientation); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiPolygonBuilder other = (MultiPolygonBuilder) obj; + return Objects.equals(polygons, other.polygons) && + Objects.equals(orientation, other.orientation); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(orientation == Orientation.RIGHT); + out.writeVInt(polygons.size()); + for (PolygonBuilder polygon : polygons) { + polygon.writeTo(out); + } + } + + @Override + public MultiPolygonBuilder readFrom(StreamInput in) throws IOException { + Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + MultiPolygonBuilder polyBuilder = new MultiPolygonBuilder(orientation); + int holes = in.readVInt(); + for (int i = 0; i < holes; i++) { + polyBuilder.polygon(PolygonBuilder.PROTOTYPE.readFrom(in)); + } + return polyBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 13b068b2e5b..81555aa716a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -42,6 +42,7 @@ import java.util.HashSet; import java.util.Iterator; import java.util.concurrent.atomic.AtomicBoolean; import java.util.List; +import java.util.Locale; import java.util.Objects; /** @@ -196,6 +197,7 @@ public class PolygonBuilder extends ShapeBuilder { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); coordinatesArray(builder, params); builder.endArray(); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 7776c8dbb7a..d0f9914ef7d 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.*; public abstract class AbstractShapeBuilderTestCase extends ESTestCase { - private static final int NUMBER_OF_TESTBUILDERS = 1; + private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; /** @@ -51,6 +51,7 @@ public abstract class AbstractShapeBuilderTestCase exte namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java new file mode 100644 index 00000000000..eb174d1aeef --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected MultiPolygonBuilder createTestShapeBuilder() { + MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values())); + int polys = randomIntBetween(1, 10); + for (int i = 0; i < polys; i++) { + PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); + pgb.orientation = mpb.orientation; + // NORELEASE translated might have been changed by createShape, but won't survive xContent->Parse roundtrip + pgb.shell().translated = false; + mpb.polygon(pgb); + } + return mpb; + } + + @Override + protected MultiPolygonBuilder mutate(MultiPolygonBuilder original) throws IOException { + MultiPolygonBuilder mutation = copyShape(original); + if (randomBoolean()) { + // toggle orientation + mutation.orientation = (original.orientation == Orientation.LEFT ? 
Orientation.RIGHT : Orientation.LEFT); + } else { + int polyToChange = randomInt(mutation.polygons().size() - 1); + PolygonBuilderTests.mutatePolygonBuilder(mutation.polygons().get(polyToChange)); + } + return mutation; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index e6e4ae38429..e95adb40e73 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -32,6 +32,7 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCaseParse roundtrip pgb.shell().translated = false; return pgb; @@ -40,16 +41,20 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase Date: Mon, 23 Nov 2015 15:36:42 +0100 Subject: [PATCH 108/322] Adding serialization, equals and hashCode to GeometryCollectionBuilder --- .../common/geo/builders/EnvelopeBuilder.java | 5 +- .../builders/GeometryCollectionBuilder.java | 44 +++++++ .../geo/builders/MultiPolygonBuilder.java | 5 +- .../common/geo/builders/PolygonBuilder.java | 5 +- .../common/geo/builders/ShapeBuilder.java | 8 ++ .../common/io/stream/StreamInput.java | 9 ++ .../common/io/stream/StreamOutput.java | 8 ++ ...lderTests.java => CircleBuilderTests.java} | 4 +- .../geo/builders/EnvelopeBuilderTests.java | 2 + .../GeometryCollectionBuilderTests.java | 111 ++++++++++++++++++ .../geo/builders/LineStringBuilderTests.java | 2 + .../builders/MultiLineStringBuilderTests.java | 2 + .../geo/builders/MultiPointBuilderTests.java | 2 + .../builders/MultiPolygonBuilderTests.java | 2 + .../geo/builders/PointBuilderTests.java | 2 + .../geo/builders/PolygonBuilderTests.java | 2 + 16 files changed, 203 insertions(+), 10 deletions(-) rename core/src/test/java/org/elasticsearch/common/geo/builders/{CirlceBuilderTests.java => CircleBuilderTests.java} (94%) create mode 100644 core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 62eea070032..793e24d37b1 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -108,15 +108,14 @@ public class EnvelopeBuilder extends ShapeBuilder { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(orientation == Orientation.RIGHT); + orientation.writeTo(out); writeCoordinateTo(topLeft, out); writeCoordinateTo(bottomRight, out); } @Override public EnvelopeBuilder readFrom(StreamInput in) throws IOException { - Orientation orientation = in.readBoolean() ? 
Orientation.RIGHT : Orientation.LEFT; - return new EnvelopeBuilder(orientation) + return new EnvelopeBuilder(Orientation.readFrom(in)) .topLeft(readCoordinateFrom(in)) .bottomRight(readCoordinateFrom(in)); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 45397ed962f..e4db15998fd 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -20,18 +20,25 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Shape; + import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.Objects; public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; + public static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); + protected final ArrayList shapes = new ArrayList<>(); public GeometryCollectionBuilder() { @@ -103,6 +110,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_GEOMETRIES); for (ShapeBuilder shape : shapes) { shape.toXContent(builder, params); @@ -132,4 +140,40 @@ public class GeometryCollectionBuilder extends ShapeBuilder { //note: ShapeCollection is probably faster than a Multi* geom. 
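        // The writeTo/readFrom pair added below delegates each member shape to
        // out.writeShape(...) / in.readShape(), so every nested builder travels as a
        // NamedWriteable (type name plus payload); that is what allows a collection
        // of heterogeneous shape types to survive the round trip.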
} + @Override + public int hashCode() { + return Objects.hash(orientation, shapes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + GeometryCollectionBuilder other = (GeometryCollectionBuilder) obj; + return Objects.equals(orientation, other.orientation) && Objects.equals(shapes, other.shapes); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + orientation.writeTo(out); + out.writeVInt(shapes.size()); + for (ShapeBuilder shape : shapes) { + out.writeShape(shape); + } + } + + @Override + public GeometryCollectionBuilder readFrom(StreamInput in) throws IOException { + GeometryCollectionBuilder geometryCollectionBuilder = new GeometryCollectionBuilder(Orientation.readFrom(in)); + int shapes = in.readVInt(); + for (int i = 0; i < shapes; i++) { + geometryCollectionBuilder.shape(in.readShape()); + } + return geometryCollectionBuilder; + } + } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index e6525f770f7..46d39bdbba2 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -123,7 +123,7 @@ public class MultiPolygonBuilder extends ShapeBuilder { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(orientation == Orientation.RIGHT); + orientation.writeTo(out); out.writeVInt(polygons.size()); for (PolygonBuilder polygon : polygons) { polygon.writeTo(out); @@ -132,8 +132,7 @@ public class MultiPolygonBuilder extends ShapeBuilder { @Override public MultiPolygonBuilder readFrom(StreamInput in) throws IOException { - Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; - MultiPolygonBuilder polyBuilder = new MultiPolygonBuilder(orientation); + MultiPolygonBuilder polyBuilder = new MultiPolygonBuilder(Orientation.readFrom(in)); int holes = in.readVInt(); for (int i = 0; i < holes; i++) { polyBuilder.polygon(PolygonBuilder.PROTOTYPE.readFrom(in)); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 81555aa716a..5aff34dbda5 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -562,7 +562,7 @@ public class PolygonBuilder extends ShapeBuilder { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(orientation == Orientation.RIGHT); + orientation.writeTo(out); shell.writeTo(out); out.writeVInt(holes.size()); for (LineStringBuilder hole : holes) { @@ -572,8 +572,7 @@ public class PolygonBuilder extends ShapeBuilder { @Override public PolygonBuilder readFrom(StreamInput in) throws IOException { - Orientation orientation = in.readBoolean() ? 
Orientation.RIGHT : Orientation.LEFT; - PolygonBuilder polyBuilder = new PolygonBuilder(orientation); + PolygonBuilder polyBuilder = new PolygonBuilder(Orientation.readFrom(in)); polyBuilder.shell = LineStringBuilder.PROTOTYPE.readFrom(in); int holes = in.readVInt(); for (int i = 0; i < holes; i++) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index d8689ee737f..40c54efc7f9 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -408,6 +408,14 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri public static final Orientation COUNTER_CLOCKWISE = Orientation.RIGHT; public static final Orientation CW = Orientation.LEFT; public static final Orientation CCW = Orientation.RIGHT; + + public void writeTo (StreamOutput out) throws IOException { + out.writeBoolean(this == Orientation.RIGHT); + } + + public static Orientation readFrom (StreamInput in) throws IOException { + return in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + } } public static final String FIELD_TYPE = "type"; diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 76438fb8b6b..f8072804225 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -34,6 +34,8 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; @@ -629,6 +631,13 @@ public abstract class StreamInput extends InputStream { return readNamedWriteable(QueryBuilder.class); } + /** + * Reads a {@link ShapeBuilder} from the current stream + */ + public ShapeBuilder readShape() throws IOException { + return readNamedWriteable(ShapeBuilder.class); + } + /** * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream */ diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 5f1e7623d28..e8997b8073f 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -32,6 +32,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; @@ -618,6 +619,13 @@ public abstract class StreamOutput extends OutputStream { writeNamedWriteable(queryBuilder); } + /** + * Writes a {@link ShapeBuilder} to the current stream + */ + public void writeShape(ShapeBuilder shapeBuilder) throws IOException 
{ + writeNamedWriteable(shapeBuilder); + } + /** * Writes a {@link ScoreFunctionBuilder} to the current stream */ diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java similarity index 94% rename from core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java rename to core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java index 6b102b87b2c..9b6bd27b472 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.unit.DistanceUnit; import java.io.IOException; -public class CirlceBuilderTests extends AbstractShapeBuilderTestCase { +public class CircleBuilderTests extends AbstractShapeBuilderTestCase { + + static CircleBuilderTests PROTOTYPE = new CircleBuilderTests(); @Override protected CircleBuilder createTestShapeBuilder() { diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java index e6f3db2f8af..a8794fbd687 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java @@ -29,6 +29,8 @@ import java.io.IOException; public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase { + static final EnvelopeBuilderTests PROTOTYPE = new EnvelopeBuilderTests(); + @Override protected EnvelopeBuilder createTestShapeBuilder() { EnvelopeBuilder envelope = new EnvelopeBuilder(randomFrom(Orientation.values())); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java new file mode 100644 index 00000000000..46b4044de8f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.test.geo.RandomShapeGenerator; + +import java.io.IOException; + +public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected GeometryCollectionBuilder createTestShapeBuilder() { + // NORELEASE check of GeometryCollectionBuilder should parse maintain orientation + GeometryCollectionBuilder geometryCollection = new GeometryCollectionBuilder(); + int shapes = randomIntBetween(0, 8); + for (int i = 0; i < shapes; i++) { + switch (randomIntBetween(0, 7)) { + case 0: + geometryCollection.shape(PointBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 1: + geometryCollection.shape(CircleBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 2: + geometryCollection.shape(EnvelopeBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 3: + geometryCollection.shape(LineStringBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 4: + geometryCollection.shape(MultiLineStringBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 5: + geometryCollection.shape(MultiPolygonBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 6: + geometryCollection.shape(MultiPointBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + case 7: + geometryCollection.shape(PolygonBuilderTests.PROTOTYPE.createTestShapeBuilder()); + break; + } + } + return geometryCollection; + } + + @Override + protected GeometryCollectionBuilder mutate(GeometryCollectionBuilder original) throws IOException { + GeometryCollectionBuilder mutation = copyShape(original); + // NORELEASE check of GeometryCollectionBuilder should parse maintain orientation +// if (randomBoolean()) { +// // toggle orientation +// mutation.orientation = (mutation.orientation == Orientation.LEFT ? 
Orientation.RIGHT : Orientation.LEFT); +// } else { + // change one shape + if (mutation.shapes.size() > 0) { + int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1); + ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition); + switch (shapeToChange.type()) { + case POINT: + shapeToChange = PointBuilderTests.PROTOTYPE.mutate((PointBuilder) shapeToChange); + break; + case CIRCLE: + shapeToChange = CircleBuilderTests.PROTOTYPE.mutate((CircleBuilder) shapeToChange); + break; + case ENVELOPE: + shapeToChange = EnvelopeBuilderTests.PROTOTYPE.mutate((EnvelopeBuilder) shapeToChange); + break; + case LINESTRING: + shapeToChange = LineStringBuilderTests.PROTOTYPE.mutate((LineStringBuilder) shapeToChange); + break; + case MULTILINESTRING: + shapeToChange = MultiLineStringBuilderTests.PROTOTYPE.mutate((MultiLineStringBuilder) shapeToChange); + break; + case MULTIPOLYGON: + shapeToChange = MultiPolygonBuilderTests.PROTOTYPE.mutate((MultiPolygonBuilder) shapeToChange); + break; + case MULTIPOINT: + shapeToChange = MultiPointBuilderTests.PROTOTYPE.mutate((MultiPointBuilder) shapeToChange); + break; + case POLYGON: + shapeToChange = PolygonBuilderTests.PROTOTYPE.mutate((PolygonBuilder) shapeToChange); + break; + case GEOMETRYCOLLECTION: + throw new UnsupportedOperationException("GeometryCollection should not be nested inside each other"); + } + mutation.shapes.set(shapePosition, shapeToChange); + } else { + mutation.shape(RandomShapeGenerator.createShape(getRandom())); + } +// } + return mutation; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java index 3a1f458cd87..72138799881 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java @@ -28,6 +28,8 @@ import java.io.IOException; public class LineStringBuilderTests extends AbstractShapeBuilderTestCase { + static final LineStringBuilderTests PROTOTYPE = new LineStringBuilderTests(); + @Override protected LineStringBuilder createTestShapeBuilder() { LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java index 07698683e87..b43ffb42e6f 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java @@ -28,6 +28,8 @@ import java.io.IOException; public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase { + static final MultiLineStringBuilderTests PROTOTYPE = new MultiLineStringBuilderTests(); + @Override protected MultiLineStringBuilder createTestShapeBuilder() { return (MultiLineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTILINESTRING); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java index 49c256a4362..952d5d50ec3 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java @@ 
-28,6 +28,8 @@ import java.io.IOException; public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase { + static final MultiPointBuilderTests PROTOTYPE = new MultiPointBuilderTests(); + @Override protected MultiPointBuilder createTestShapeBuilder() { return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java index eb174d1aeef..1b8e813e956 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -27,6 +27,8 @@ import java.io.IOException; public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase { + static final MultiPolygonBuilderTests PROTOTYPE = new MultiPolygonBuilderTests(); + @Override protected MultiPolygonBuilder createTestShapeBuilder() { MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values())); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java index 1e94a1bab3a..a7e07155ce8 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; public class PointBuilderTests extends AbstractShapeBuilderTestCase { + static PointBuilderTests PROTOTYPE = new PointBuilderTests(); + @Override protected PointBuilder createTestShapeBuilder() { return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index e95adb40e73..2c9ce090535 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -29,6 +29,8 @@ import java.io.IOException; public class PolygonBuilderTests extends AbstractShapeBuilderTestCase { + static final PolygonBuilderTests PROTOTYPE = new PolygonBuilderTests(); + @Override protected PolygonBuilder createTestShapeBuilder() { PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); From f07e61c05b0d475c0847bb699acb2f5db72d822e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 23 Nov 2015 18:38:06 +0100 Subject: [PATCH 109/322] Make GeoShapeQueryBuilder serialize shape --- .../common/geo/builders/CircleBuilder.java | 2 +- .../common/geo/builders/EnvelopeBuilder.java | 3 +- .../geo/builders/LineStringBuilder.java | 10 +-- .../geo/builders/MultiLineStringBuilder.java | 2 +- .../geo/builders/MultiPointBuilder.java | 2 +- .../geo/builders/MultiPolygonBuilder.java | 2 +- .../common/geo/builders/PointBuilder.java | 2 +- .../common/geo/builders/PolygonBuilder.java | 2 +- .../common/geo/builders/ShapeBuilder.java | 11 --- .../geo/builders/ShapeBuilderRegistry.java | 42 ++++++++++ .../index/query/GeoShapeQueryBuilder.java | 78 ++++++------------- .../index/query/GeoShapeQueryParser.java | 9 +-- .../elasticsearch/indices/IndicesModule.java | 2 + 
.../AbstractShapeBuilderTestCase.java | 1 + .../geo/builders/CircleBuilderTests.java | 2 +- .../builders/MultiPolygonBuilderTests.java | 2 +- .../geo/builders/PointBuilderTests.java | 4 +- .../geo/builders/PolygonBuilderTests.java | 2 +- .../index/query/AbstractQueryTestCase.java | 2 + .../query/GeoShapeQueryBuilderTests.java | 37 +++++---- 20 files changed, 113 insertions(+), 104 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index ddafecc4e7f..5f11d12a4bf 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -36,7 +36,7 @@ public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; - static final CircleBuilder PROTOTYPE = new CircleBuilder(); + public static final CircleBuilder PROTOTYPE = new CircleBuilder(); private DistanceUnit unit; private double radius; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 793e24d37b1..82fd3275116 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -25,7 +25,6 @@ import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; - import java.io.IOException; import java.util.Locale; import java.util.Objects; @@ -34,7 +33,7 @@ public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; - static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); + public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); protected Coordinate topLeft; protected Coordinate bottomRight; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 71d9bee8686..cec2a66e757 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -38,7 +38,7 @@ public class LineStringBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; - static final LineStringBuilder PROTOTYPE = new LineStringBuilder(); + public static final LineStringBuilder PROTOTYPE = new LineStringBuilder(); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { @@ -148,7 +148,7 @@ public class LineStringBuilder extends PointCollection { @Override public int hashCode() { - return Objects.hash(points, translated); + return Objects.hash(points, translated()); } @Override @@ -161,7 +161,7 @@ public class LineStringBuilder extends PointCollection { } LineStringBuilder other = (LineStringBuilder) obj; return Objects.equals(points, other.points) && - (translated == other.translated); + (translated() == other.translated()); } @Override @@ -170,7 +170,7 @@ public class 
LineStringBuilder extends PointCollection { for (Coordinate point : points) { writeCoordinateTo(point, out); } - out.writeBoolean(translated); + out.writeBoolean(translated()); } @Override @@ -180,7 +180,7 @@ public class LineStringBuilder extends PointCollection { for (int i=0; i < size; i++) { lineStringBuilder.point(readCoordinateFrom(in)); } - lineStringBuilder.translated = in.readBoolean(); + lineStringBuilder.translated(in.readBoolean()); return lineStringBuilder; } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 16b36d81037..c0a79611dec 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -37,7 +37,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; - static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); + public static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); private final ArrayList lines = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 4f2ad8cbc57..f1b403d42b9 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -36,7 +36,7 @@ public class MultiPointBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; - final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(); + public final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 46d39bdbba2..a82c0fad9bd 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -36,7 +36,7 @@ import com.vividsolutions.jts.geom.Coordinate; public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; - static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); + public static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); private final ArrayList polygons = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 22c293f4939..35225461658 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -32,7 +32,7 @@ import com.vividsolutions.jts.geom.Coordinate; public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - static final PointBuilder PROTOTYPE = new PointBuilder(); + public static final PointBuilder PROTOTYPE = new PointBuilder(); private Coordinate coordinate; diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 5aff34dbda5..80591dbb3eb 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -53,7 +53,7 @@ import java.util.Objects; public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; - static final PolygonBuilder PROTOTYPE = new PolygonBuilder(); + public static final PolygonBuilder PROTOTYPE = new PolygonBuilder(); private static final Coordinate[][] EMPTY = new Coordinate[0][]; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 40c54efc7f9..1ab568d4be1 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -708,15 +708,4 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri public String getWriteableName() { return type().shapeName(); } - - // NORELEASE this should be deleted as soon as all shape builders implement writable - @Override - public void writeTo(StreamOutput out) throws IOException { - } - - // NORELEASE this should be deleted as soon as all shape builders implement writable - @Override - public ShapeBuilder readFrom(StreamInput in) throws IOException { - return null; - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java new file mode 100644 index 00000000000..a900b6a9ee5 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +/** + * Register the shape builder prototypes with the {@link NamedWriteableRegistry} + */ +public class ShapeBuilderRegistry { + + @Inject + public ShapeBuilderRegistry(NamedWriteableRegistry namedWriteableRegistry) { + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 5aad36cd27a..454465727b7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -31,19 +31,16 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.search.internal.SearchContext; @@ -61,13 +58,11 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder { String fieldName = null; ShapeRelation shapeRelation = null; SpatialStrategy strategy = null; - BytesReference shape = null; + ShapeBuilder shape = null; String id = null; String type = null; @@ -79,8 +77,7 @@ public class GeoShapeQueryParser implements QueryParser { currentFieldName = parser.currentName(); token = parser.nextToken(); if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) { - XContentBuilder builder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); - shape = builder.bytes(); + shape = ShapeBuilder.parse(parser); } else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) { String strategyName = parser.text(); strategy = 
SpatialStrategy.fromString(strategyName); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 6878002c015..3faeb6fb3df 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.indices; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.geo.ShapesAvailability; +import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.index.NodeServicesProvider; @@ -218,6 +219,7 @@ public class IndicesModule extends AbstractModule { bind(IndicesFieldDataCacheListener.class).asEagerSingleton(); bind(TermVectorsService.class).asEagerSingleton(); bind(NodeServicesProvider.class).asEagerSingleton(); + bind(ShapeBuilderRegistry.class).asEagerSingleton(); } // public for testing diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index d0f9914ef7d..a7fbfb8e380 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -52,6 +52,7 @@ public abstract class AbstractShapeBuilderTestCase exte namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java index 9b6bd27b472..17ed5e19876 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java @@ -27,7 +27,7 @@ import java.io.IOException; public class CircleBuilderTests extends AbstractShapeBuilderTestCase { - static CircleBuilderTests PROTOTYPE = new CircleBuilderTests(); + final static CircleBuilderTests PROTOTYPE = new CircleBuilderTests(); @Override protected CircleBuilder createTestShapeBuilder() { diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java index 1b8e813e956..4429dca608a 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -37,7 +37,7 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCaseParse roundtrip - pgb.shell().translated = false; + pgb.shell().translated(false); mpb.polygon(pgb); } return mpb; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java index 
a7e07155ce8..e1bc86d57a4 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; public class PointBuilderTests extends AbstractShapeBuilderTestCase { - static PointBuilderTests PROTOTYPE = new PointBuilderTests(); + final static PointBuilderTests PROTOTYPE = new PointBuilderTests(); @Override protected PointBuilder createTestShapeBuilder() { @@ -35,6 +35,6 @@ public class PointBuilderTests extends AbstractShapeBuilderTestCaseParse roundtrip - pgb.shell().translated = false; + pgb.shell().translated(false); return pgb; } diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index aa97d722737..2d250ff0b95 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -191,6 +192,7 @@ public abstract class AbstractQueryTestCase> // skip services bindQueryParsersExtension(); bindMapperExtension(); + bind(ShapeBuilderRegistry.class).asEagerSingleton(); } }, new ScriptModule(settings) { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 6b2088d25c6..55292fefc7a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -59,14 +60,17 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase Date: Fri, 4 Dec 2015 13:33:30 +0100 Subject: [PATCH 110/322] Changes after rebase on master --- .../geo/builders/LineStringBuilder.java | 7 +- .../geo/builders/MultiLineStringBuilder.java | 2 +- .../geo/builders/MultiPointBuilder.java | 2 +- .../common/geo/builders/PolygonBuilder.java | 144 ++++++++++++++++++ .../AbstractShapeBuilderTestCase.java | 18 +-- .../geo/builders/CircleBuilderTests.java | 24 +-- .../geo/builders/EnvelopeBuilderTests.java | 24 +-- .../GeometryCollectionBuilderTests.java | 40 ++--- .../geo/builders/LineStringBuilderTests.java | 24 +-- .../builders/MultiLineStringBuilderTests.java | 16 +- .../geo/builders/MultiPointBuilderTests.java | 16 +- .../builders/MultiPolygonBuilderTests.java | 32 ++-- .../geo/builders/PointBuilderTests.java | 18 ++- .../geo/builders/PolygonBuilderTests.java | 22 
+-- .../query/GeoShapeQueryBuilderTests.java | 7 - 15 files changed, 291 insertions(+), 105 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index cec2a66e757..464d72c8d8c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -148,7 +148,7 @@ public class LineStringBuilder extends PointCollection { @Override public int hashCode() { - return Objects.hash(points, translated()); + return Objects.hash(points); } @Override @@ -160,8 +160,7 @@ public class LineStringBuilder extends PointCollection { return false; } LineStringBuilder other = (LineStringBuilder) obj; - return Objects.equals(points, other.points) && - (translated() == other.translated()); + return Objects.equals(points, other.points); } @Override @@ -170,7 +169,6 @@ public class LineStringBuilder extends PointCollection { for (Coordinate point : points) { writeCoordinateTo(point, out); } - out.writeBoolean(translated()); } @Override @@ -180,7 +178,6 @@ public class LineStringBuilder extends PointCollection { for (int i=0; i < size; i++) { lineStringBuilder.point(readCoordinateFrom(in)); } - lineStringBuilder.translated(in.readBoolean()); return lineStringBuilder; } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index c0a79611dec..4703ac19b08 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -131,7 +131,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { public MultiLineStringBuilder readFrom(StreamInput in) throws IOException { MultiLineStringBuilder multiLineStringBuilder = new MultiLineStringBuilder(); int size = in.readVInt(); - for (int i=0; i < size; i++) { + for (int i = 0; i < size; i++) { multiLineStringBuilder.linestring(LineStringBuilder.PROTOTYPE.readFrom(in)); } return multiLineStringBuilder; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index f1b403d42b9..a4d236e3557 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -56,7 +56,7 @@ public class MultiPointBuilder extends PointCollection { for (Coordinate coord : points) { shapes.add(SPATIAL_CONTEXT.makePoint(coord.x, coord.y)); } - XShapeCollection multiPoints = new XShapeCollection<>(shapes, SPATIAL_CONTEXT); + XShapeCollection multiPoints = new XShapeCollection<>(shapes, SPATIAL_CONTEXT); multiPoints.setPointsOnly(true); return multiPoints; } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 80591dbb3eb..fefbcb348ca 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -541,6 +541,150 @@ public class PolygonBuilder extends ShapeBuilder { return points.length-1; } + /** + * Create a connected list of a list of coordinates 
+ * + * @param points + * array of point + * @param offset + * index of the first point + * @param length + * number of points + * @return Array of edges + */ + private static Edge[] ring(int component, boolean direction, boolean handedness, LineStringBuilder shell, + Coordinate[] points, int offset, Edge[] edges, int toffset, int length, final AtomicBoolean translated) { + // calculate the direction of the points: + // find the point a the top of the set and check its + // neighbors orientation. So direction is equivalent + // to clockwise/counterclockwise + final int top = top(points, offset, length); + final int prev = (offset + ((top + length - 1) % length)); + final int next = (offset + ((top + 1) % length)); + boolean orientation = points[offset + prev].x > points[offset + next].x; + + // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) + // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards + // thus if orientation is computed as cw, the logic will translate points across dateline + // and convert to a right handed system + + // compute the bounding box and calculate range + double[] range = range(points, offset, length); + final double rng = range[1] - range[0]; + // translate the points if the following is true + // 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres + // (translation would result in a collapsed poly) + // 2. the shell of the candidate hole has been translated (to preserve the coordinate system) + boolean incorrectOrientation = component == 0 && handedness != orientation; + if ( (incorrectOrientation && (rng > DATELINE && rng != 2*DATELINE)) || (translated.get() && component != 0)) { + translate(points); + // flip the translation bit if the shell is being translated + if (component == 0) { + translated.set(true); + } + // correct the orientation post translation (ccw for shell, cw for holes) + if (component == 0 || (component != 0 && handedness == orientation)) { + orientation = !orientation; + } + } + return concat(component, direction ^ orientation, points, offset, edges, toffset, length); + } + + private static final int top(Coordinate[] points, int offset, int length) { + int top = 0; // we start at 1 here since top points to 0 + for (int i = 1; i < length; i++) { + if (points[offset + i].y < points[offset + top].y) { + top = i; + } else if (points[offset + i].y == points[offset + top].y) { + if (points[offset + i].x < points[offset + top].x) { + top = i; + } + } + } + return top; + } + + private static final double[] range(Coordinate[] points, int offset, int length) { + double minX = points[0].x; + double maxX = points[0].x; + double minY = points[0].y; + double maxY = points[0].y; + // compute the bounding coordinates (@todo: cleanup brute force) + for (int i = 1; i < length; ++i) { + if (points[offset + i].x < minX) { + minX = points[offset + i].x; + } + if (points[offset + i].x > maxX) { + maxX = points[offset + i].x; + } + if (points[offset + i].y < minY) { + minY = points[offset + i].y; + } + if (points[offset + i].y > maxY) { + maxY = points[offset + i].y; + } + } + return new double[] {minX, maxX, minY, maxY}; + } + + /** + * Concatenate a set of points to a polygon + * + * @param component + * component id of the polygon + * @param direction + * direction of the ring + * @param points + * list of points to concatenate + * @param pointOffset + * index of the first point + * @param edges + * Array of edges to write the result to 
+ * @param edgeOffset + * index of the first edge in the result + * @param length + * number of points to use + * @return the edges creates + */ + private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset, Edge[] edges, final int edgeOffset, + int length) { + assert edges.length >= length+edgeOffset; + assert points.length >= length+pointOffset; + edges[edgeOffset] = new Edge(points[pointOffset], null); + for (int i = 1; i < length; i++) { + if (direction) { + edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]); + edges[edgeOffset + i].component = component; + } else if(!edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i])) { + edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null); + edges[edgeOffset + i - 1].component = component; + } else { + throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + points[pointOffset + i]); + } + } + + if (direction) { + edges[edgeOffset].setNext(edges[edgeOffset + length - 1]); + edges[edgeOffset].component = component; + } else { + edges[edgeOffset + length - 1].setNext(edges[edgeOffset]); + edges[edgeOffset + length - 1].component = component; + } + + return edges; + } + + /** + * Transforms coordinates in the eastern hemisphere (-180:0) to a (180:360) range + */ + private static void translate(Coordinate[] points) { + for (Coordinate c : points) { + if (c.x < 0) { + c.x += 2*DATELINE; + } + } + } + @Override public int hashCode() { return Objects.hash(shell, holes, orientation); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index a7fbfb8e380..10a5070f3f8 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -69,7 +69,7 @@ public abstract class AbstractShapeBuilderTestCase exte /** * mutate the given shape so the returned shape is different */ - protected abstract SB mutate(SB original) throws IOException; + protected abstract SB createMutation(SB original) throws IOException; /** * Test that creates new shape from a random test shape and checks both for equality @@ -95,10 +95,11 @@ public abstract class AbstractShapeBuilderTestCase exte /** * Test serialization and deserialization of the test shape. 
*/ + @SuppressWarnings("unchecked") public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB testShape = createTestShapeBuilder(); - SB deserializedShape = copyShape(testShape); + SB deserializedShape = (SB) copyShape(testShape); assertEquals(testShape, deserializedShape); assertEquals(testShape.hashCode(), deserializedShape.hashCode()); assertNotSame(testShape, deserializedShape); @@ -108,6 +109,7 @@ public abstract class AbstractShapeBuilderTestCase exte /** * Test equality and hashCode properties */ + @SuppressWarnings("unchecked") public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB firstShape = createTestShapeBuilder(); @@ -116,15 +118,15 @@ public abstract class AbstractShapeBuilderTestCase exte assertTrue("shape is not equal to self", firstShape.equals(firstShape)); assertThat("same shape's hashcode returns different values if called multiple times", firstShape.hashCode(), equalTo(firstShape.hashCode())); - assertThat("different shapes should not be equal", mutate(firstShape), not(equalTo(firstShape))); + assertThat("different shapes should not be equal", createMutation(firstShape), not(equalTo(firstShape))); - SB secondShape = copyShape(firstShape); + SB secondShape = (SB) copyShape(firstShape); assertTrue("shape is not equal to self", secondShape.equals(secondShape)); assertTrue("shape is not equal to its copy", firstShape.equals(secondShape)); assertTrue("equals is not symmetric", secondShape.equals(firstShape)); assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(firstShape.hashCode())); - SB thirdShape = copyShape(secondShape); + SB thirdShape = (SB) copyShape(secondShape); assertTrue("shape is not equal to self", thirdShape.equals(thirdShape)); assertTrue("shape is not equal to its copy", secondShape.equals(thirdShape)); assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(thirdShape.hashCode())); @@ -135,14 +137,12 @@ public abstract class AbstractShapeBuilderTestCase exte } } - protected SB copyShape(SB original) throws IOException { + static ShapeBuilder copyShape(ShapeBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { ShapeBuilder prototype = (ShapeBuilder) namedWriteableRegistry.getPrototype(ShapeBuilder.class, original.getWriteableName()); - @SuppressWarnings("unchecked") - SB copy = (SB) prototype.readFrom(in); - return copy; + return prototype.readFrom(in); } } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java index 17ed5e19876..1db9da428ad 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java @@ -27,20 +27,18 @@ import java.io.IOException; public class CircleBuilderTests extends AbstractShapeBuilderTestCase { - final static CircleBuilderTests PROTOTYPE = new CircleBuilderTests(); - @Override protected CircleBuilder createTestShapeBuilder() { - double centerX = randomDoubleBetween(-180, 180, false); - double centerY = randomDoubleBetween(-90, 90, false); - return new CircleBuilder() - .center(new 
Coordinate(centerX, centerY)) - .radius(randomDoubleBetween(0.1, 10.0, false), randomFrom(DistanceUnit.values())); + return createRandomShape(); } @Override - protected CircleBuilder mutate(CircleBuilder original) throws IOException { - CircleBuilder mutation = copyShape(original); + protected CircleBuilder createMutation(CircleBuilder original) throws IOException { + return mutate(original); + } + + static CircleBuilder mutate(CircleBuilder original) throws IOException { + CircleBuilder mutation = (CircleBuilder) copyShape(original); double radius = original.radius(); DistanceUnit unit = original.unit(); @@ -57,4 +55,12 @@ public class CircleBuilderTests extends AbstractShapeBuilderTestCase { - static final EnvelopeBuilderTests PROTOTYPE = new EnvelopeBuilderTests(); - @Override protected EnvelopeBuilder createTestShapeBuilder() { - EnvelopeBuilder envelope = new EnvelopeBuilder(randomFrom(Orientation.values())); - Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom())); - envelope.topLeft(box.getMinX(), box.getMaxY()) - .bottomRight(box.getMaxX(), box.getMinY()); - return envelope; + return createRandomShape(); } @Override - protected EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException { - EnvelopeBuilder mutation = copyShape(original); + protected EnvelopeBuilder createMutation(EnvelopeBuilder original) throws IOException { + return mutate(original); + } + + static EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException { + EnvelopeBuilder mutation = (EnvelopeBuilder) copyShape(original); if (randomBoolean()) { // toggle orientation mutation.orientation = (original.orientation == Orientation.LEFT ? Orientation.RIGHT : Orientation.LEFT); @@ -65,4 +63,12 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase { - static final LineStringBuilderTests PROTOTYPE = new LineStringBuilderTests(); - @Override protected LineStringBuilder createTestShapeBuilder() { - LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING); - if (randomBoolean()) { - lsb.close(); - } - return lsb; + return createRandomShape(); } @Override - protected LineStringBuilder mutate(LineStringBuilder original) throws IOException { - LineStringBuilder mutation = copyShape(original); + protected LineStringBuilder createMutation(LineStringBuilder original) throws IOException { + return mutate(original); + } + + static LineStringBuilder mutate(LineStringBuilder original) throws IOException { + LineStringBuilder mutation = (LineStringBuilder) copyShape(original); Coordinate[] coordinates = original.coordinates(false); Coordinate coordinate = randomFrom(coordinates); if (randomBoolean()) { @@ -59,4 +57,12 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase { - static final MultiLineStringBuilderTests PROTOTYPE = new MultiLineStringBuilderTests(); - @Override protected MultiLineStringBuilder createTestShapeBuilder() { - return (MultiLineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTILINESTRING); + return createRandomShape(); } @Override - protected MultiLineStringBuilder mutate(MultiLineStringBuilder original) throws IOException { - MultiLineStringBuilder mutation = copyShape(original); + protected MultiLineStringBuilder createMutation(MultiLineStringBuilder original) throws IOException { + return mutate(original); + } + + static MultiLineStringBuilder mutate(MultiLineStringBuilder original) throws IOException { + 
MultiLineStringBuilder mutation = (MultiLineStringBuilder) copyShape(original); Coordinate[][] coordinates = mutation.coordinates(); int lineToChange = randomInt(coordinates.length - 1); for (int i = 0; i < coordinates.length; i++) { @@ -61,4 +63,8 @@ public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase { - static final MultiPointBuilderTests PROTOTYPE = new MultiPointBuilderTests(); - @Override protected MultiPointBuilder createTestShapeBuilder() { - return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT); + return createRandomShape(); } @Override - protected MultiPointBuilder mutate(MultiPointBuilder original) throws IOException { - MultiPointBuilder mutation = copyShape(original); + protected MultiPointBuilder createMutation(MultiPointBuilder original) throws IOException { + return mutate(original); + } + + static MultiPointBuilder mutate(MultiPointBuilder original) throws IOException { + MultiPointBuilder mutation = (MultiPointBuilder) copyShape(original); Coordinate[] coordinates = original.coordinates(false); Coordinate coordinate = randomFrom(coordinates); if (randomBoolean()) { @@ -55,4 +57,8 @@ public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase { - static final MultiPolygonBuilderTests PROTOTYPE = new MultiPolygonBuilderTests(); - @Override protected MultiPolygonBuilder createTestShapeBuilder() { - MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values())); - int polys = randomIntBetween(1, 10); - for (int i = 0; i < polys; i++) { - PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); - pgb.orientation = mpb.orientation; - // NORELEASE translated might have been changed by createShape, but won't survive xContent->Parse roundtrip - pgb.shell().translated(false); - mpb.polygon(pgb); - } - return mpb; + return createRandomShape(); } @Override - protected MultiPolygonBuilder mutate(MultiPolygonBuilder original) throws IOException { - MultiPolygonBuilder mutation = copyShape(original); + protected MultiPolygonBuilder createMutation(MultiPolygonBuilder original) throws IOException { + return mutate(original); + } + + static MultiPolygonBuilder mutate(MultiPolygonBuilder original) throws IOException { + MultiPolygonBuilder mutation = (MultiPolygonBuilder) copyShape(original); if (randomBoolean()) { // toggle orientation mutation.orientation = (original.orientation == Orientation.LEFT ? 
Orientation.RIGHT : Orientation.LEFT); @@ -55,4 +48,15 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase { +import java.io.IOException; - final static PointBuilderTests PROTOTYPE = new PointBuilderTests(); +public class PointBuilderTests extends AbstractShapeBuilderTestCase { @Override protected PointBuilder createTestShapeBuilder() { - return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); + return createRandomShape(); } @Override - protected PointBuilder mutate(PointBuilder original) { + protected PointBuilder createMutation(PointBuilder original) throws IOException { + return mutate(original); + } + + static PointBuilder mutate(PointBuilder original) { return new PointBuilder().coordinate(new Coordinate(original.longitude() / 2, original.latitude() / 2)); } + + static PointBuilder createRandomShape() { + return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); + } + + } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index b7cbf85a601..69457419727 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -29,20 +29,18 @@ import java.io.IOException; public class PolygonBuilderTests extends AbstractShapeBuilderTestCase { - static final PolygonBuilderTests PROTOTYPE = new PolygonBuilderTests(); - @Override protected PolygonBuilder createTestShapeBuilder() { - PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); - pgb.orientation = randomFrom(Orientation.values()); - // NORELEASE translated might have been changed by createShape, but won't survive xContent->Parse roundtrip - pgb.shell().translated(false); - return pgb; + return createRandomShape(); } @Override - protected PolygonBuilder mutate(PolygonBuilder original) throws IOException { - PolygonBuilder mutation = copyShape(original); + protected PolygonBuilder createMutation(PolygonBuilder original) throws IOException { + return mutate(original); + } + + static PolygonBuilder mutate(PolygonBuilder original) throws IOException { + PolygonBuilder mutation = (PolygonBuilder) copyShape(original); return mutatePolygonBuilder(mutation); } @@ -75,4 +73,10 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase Date: Fri, 4 Dec 2015 15:59:28 +0100 Subject: [PATCH 111/322] Only Polygon and MultiPolygon need `orientation` property Removing the `orientation` field from ShapeBuilder, only leaving it in PolygonBuilder and MultiPolygonBuilder which are the only places where it is actually used and parsed at the moment. 
--- .../common/geo/builders/EnvelopeBuilder.java | 30 +++++------- .../builders/GeometryCollectionBuilder.java | 17 ++----- .../geo/builders/MultiPolygonBuilder.java | 21 ++++++-- .../common/geo/builders/PolygonBuilder.java | 10 +++- .../common/geo/builders/ShapeBuilder.java | 49 ++++++++----------- .../common/geo/builders/ShapeBuilders.java | 18 ------- .../index/mapper/geo/GeoShapeFieldMapper.java | 2 +- .../geo/builders/EnvelopeBuilderTests.java | 36 ++++++-------- .../GeometryCollectionBuilderTests.java | 8 --- .../builders/MultiPolygonBuilderTests.java | 20 +++++--- .../geo/builders/PolygonBuilderTests.java | 20 ++++++-- .../query/GeoShapeQueryBuilderTests.java | 1 - 12 files changed, 109 insertions(+), 123 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 82fd3275116..afcf8990513 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Locale; import java.util.Objects; public class EnvelopeBuilder extends ShapeBuilder { @@ -35,16 +34,8 @@ public class EnvelopeBuilder extends ShapeBuilder { public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); - protected Coordinate topLeft; - protected Coordinate bottomRight; - - public EnvelopeBuilder() { - this(Orientation.RIGHT); - } - - public EnvelopeBuilder(Orientation orientation) { - super(orientation); - } + private Coordinate topLeft; + private Coordinate bottomRight; public EnvelopeBuilder topLeft(Coordinate topLeft) { this.topLeft = topLeft; @@ -55,6 +46,10 @@ public class EnvelopeBuilder extends ShapeBuilder { return topLeft(coordinate(longitude, latitude)); } + public Coordinate topLeft() { + return this.topLeft; + } + public EnvelopeBuilder bottomRight(Coordinate bottomRight) { this.bottomRight = bottomRight; return this; @@ -64,11 +59,14 @@ public class EnvelopeBuilder extends ShapeBuilder { return bottomRight(coordinate(longitude, latitude)); } + public Coordinate bottomRight() { + return this.bottomRight; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); toXContent(builder, topLeft); toXContent(builder, bottomRight); @@ -88,7 +86,7 @@ public class EnvelopeBuilder extends ShapeBuilder { @Override public int hashCode() { - return Objects.hash(orientation, topLeft, bottomRight); + return Objects.hash(topLeft, bottomRight); } @Override @@ -100,21 +98,19 @@ public class EnvelopeBuilder extends ShapeBuilder { return false; } EnvelopeBuilder other = (EnvelopeBuilder) obj; - return Objects.equals(orientation, other.orientation) && - Objects.equals(topLeft, other.topLeft) && + return Objects.equals(topLeft, other.topLeft) && Objects.equals(bottomRight, other.bottomRight); } @Override public void writeTo(StreamOutput out) throws IOException { - orientation.writeTo(out); writeCoordinateTo(topLeft, out); writeCoordinateTo(bottomRight, out); } @Override public EnvelopeBuilder readFrom(StreamInput in) 
throws IOException { - return new EnvelopeBuilder(Orientation.readFrom(in)) + return new EnvelopeBuilder() .topLeft(readCoordinateFrom(in)) .bottomRight(readCoordinateFrom(in)); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index e4db15998fd..067cd014c0f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.Objects; public class GeometryCollectionBuilder extends ShapeBuilder { @@ -41,14 +40,6 @@ public class GeometryCollectionBuilder extends ShapeBuilder { protected final ArrayList shapes = new ArrayList<>(); - public GeometryCollectionBuilder() { - this(Orientation.RIGHT); - } - - public GeometryCollectionBuilder(Orientation orientation) { - super(orientation); - } - public GeometryCollectionBuilder shape(ShapeBuilder shape) { this.shapes.add(shape); return this; @@ -110,7 +101,6 @@ public class GeometryCollectionBuilder extends ShapeBuilder { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_GEOMETRIES); for (ShapeBuilder shape : shapes) { shape.toXContent(builder, params); @@ -142,7 +132,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { @Override public int hashCode() { - return Objects.hash(orientation, shapes); + return Objects.hash(shapes); } @Override @@ -154,12 +144,11 @@ public class GeometryCollectionBuilder extends ShapeBuilder { return false; } GeometryCollectionBuilder other = (GeometryCollectionBuilder) obj; - return Objects.equals(orientation, other.orientation) && Objects.equals(shapes, other.shapes); + return Objects.equals(shapes, other.shapes); } @Override public void writeTo(StreamOutput out) throws IOException { - orientation.writeTo(out); out.writeVInt(shapes.size()); for (ShapeBuilder shape : shapes) { out.writeShape(shape); @@ -168,7 +157,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { @Override public GeometryCollectionBuilder readFrom(StreamInput in) throws IOException { - GeometryCollectionBuilder geometryCollectionBuilder = new GeometryCollectionBuilder(Orientation.readFrom(in)); + GeometryCollectionBuilder geometryCollectionBuilder = new GeometryCollectionBuilder(); int shapes = in.readVInt(); for (int i = 0; i < shapes; i++) { geometryCollectionBuilder.shape(in.readShape()); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index a82c0fad9bd..2f9d595c9cb 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -40,16 +40,31 @@ public class MultiPolygonBuilder extends ShapeBuilder { private final ArrayList polygons = new ArrayList<>(); + private Orientation orientation = Orientation.RIGHT; + public MultiPolygonBuilder() { this(Orientation.RIGHT); } public 
MultiPolygonBuilder(Orientation orientation) { - super(orientation); + this.orientation = orientation; } + public Orientation orientation() { + return this.orientation; + } + + /** + * Add a shallow copy of the polygon to the multipolygon. This will apply the orientation of the + * {@link MultiPolygonBuilder} to the polygon if polygon has different orientation. + */ public MultiPolygonBuilder polygon(PolygonBuilder polygon) { - this.polygons.add(polygon); + PolygonBuilder pb = new PolygonBuilder(this.orientation); + pb.points(polygon.shell().coordinates(false)); + for (LineStringBuilder hole : polygon.holes()) { + pb.hole(hole); + } + this.polygons.add(pb); return this; } @@ -118,7 +133,7 @@ public class MultiPolygonBuilder extends ShapeBuilder { } MultiPolygonBuilder other = (MultiPolygonBuilder) obj; return Objects.equals(polygons, other.polygons) && - Objects.equals(orientation, other.orientation); + Objects.equals(orientation, other.orientation); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index fefbcb348ca..03ff6a6b892 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -57,6 +57,8 @@ public class PolygonBuilder extends ShapeBuilder { private static final Coordinate[][] EMPTY = new Coordinate[0][]; + private Orientation orientation = Orientation.RIGHT; + // line string defining the shell of the polygon private LineStringBuilder shell; @@ -64,7 +66,7 @@ public class PolygonBuilder extends ShapeBuilder { private final ArrayList holes = new ArrayList<>(); public PolygonBuilder() { - this(new ArrayList(), Orientation.RIGHT); + this(Orientation.RIGHT); } public PolygonBuilder(Orientation orientation) { @@ -72,10 +74,14 @@ public class PolygonBuilder extends ShapeBuilder { } public PolygonBuilder(ArrayList points, Orientation orientation) { - super(orientation); + this.orientation = orientation; this.shell = new LineStringBuilder().points(points); } + public Orientation orientation() { + return this.orientation; + } + public PolygonBuilder point(double longitude, double latitude) { shell.point(longitude, latitude); return this; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 1ab568d4be1..fcd8177ac6c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -77,16 +77,10 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri /** @see com.spatial4j.core.shape.jts.JtsGeometry#index() */ protected final boolean autoIndexJtsGeometry = true;//may want to turn off once SpatialStrategy impls do it. 
- protected Orientation orientation = Orientation.RIGHT; - protected ShapeBuilder() { } - protected ShapeBuilder(Orientation orientation) { - this.orientation = orientation; - } - protected static Coordinate coordinate(double longitude, double latitude) { return new Coordinate(longitude, latitude); } @@ -186,22 +180,6 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri return new Coordinate(in.readDouble(), in.readDouble()); } - public static Orientation orientationFromString(String orientation) { - orientation = orientation.toLowerCase(Locale.ROOT); - switch (orientation) { - case "right": - case "counterclockwise": - case "ccw": - return Orientation.RIGHT; - case "left": - case "clockwise": - case "cw": - return Orientation.LEFT; - default: - throw new IllegalArgumentException("Unknown orientation [" + orientation + "]"); - } - } - protected static Coordinate shift(Coordinate coordinate, double dateline) { if (dateline == 0) { return coordinate; @@ -416,6 +394,22 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri public static Orientation readFrom (StreamInput in) throws IOException { return in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; } + + public static Orientation fromString(String orientation) { + orientation = orientation.toLowerCase(Locale.ROOT); + switch (orientation) { + case "right": + case "counterclockwise": + case "ccw": + return Orientation.RIGHT; + case "left": + case "clockwise": + case "cw": + return Orientation.LEFT; + default: + throw new IllegalArgumentException("Unknown orientation [" + orientation + "]"); + } + } } public static final String FIELD_TYPE = "type"; @@ -506,7 +500,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri radius = Distance.parseDistance(parser.text()); } else if (FIELD_ORIENTATION.equals(fieldName)) { parser.nextToken(); - requestedOrientation = orientationFromString(parser.text()); + requestedOrientation = Orientation.fromString(parser.text()); } else { parser.nextToken(); parser.skipChildren(); @@ -532,7 +526,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri case POLYGON: return parsePolygon(node, requestedOrientation, coerce); case MULTIPOLYGON: return parseMultiPolygon(node, requestedOrientation, coerce); case CIRCLE: return parseCircle(node, radius); - case ENVELOPE: return parseEnvelope(node, requestedOrientation); + case ENVELOPE: return parseEnvelope(node); case GEOMETRYCOLLECTION: return geometryCollections; default: throw new ElasticsearchParseException("shape type [{}] not included", shapeType); @@ -558,7 +552,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri return ShapeBuilders.newCircleBuilder().center(coordinates.coordinate).radius(radius); } - protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates, final Orientation orientation) { + protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates) { // validate the coordinate array for envelope type if (coordinates.children.size() != 2) { throw new ElasticsearchParseException("invalid number of points [{}] provided for " + @@ -572,7 +566,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y)); lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y)); } - return ShapeBuilders.newEnvelope(orientation).topLeft(uL).bottomRight(lR); + return 
ShapeBuilders.newEnvelope().topLeft(uL).bottomRight(lR); } protected static void validateMultiPointNode(CoordinateNode coordinates) { @@ -692,8 +686,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri } XContentParser.Token token = parser.nextToken(); - GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection( (mapper == null) ? Orientation.RIGHT : mapper - .fieldType().orientation()); + GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection(); while (token != XContentParser.Token.END_ARRAY) { ShapeBuilder shapeBuilder = GeoShapeType.parse(parser); geometryCollection.shape(shapeBuilder); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java index e294a9d6ef7..61d7a9cd07e 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java @@ -110,15 +110,6 @@ public class ShapeBuilders { return new GeometryCollectionBuilder(); } - /** - * Create a new GeometryCollection - * - * @return a new {@link GeometryCollectionBuilder} - */ - public static GeometryCollectionBuilder newGeometryCollection(ShapeBuilder.Orientation orientation) { - return new GeometryCollectionBuilder(orientation); - } - /** * create a new Circle * @@ -136,13 +127,4 @@ public class ShapeBuilders { public static EnvelopeBuilder newEnvelope() { return new EnvelopeBuilder(); } - - /** - * create a new rectangle - * - * @return a new {@link EnvelopeBuilder} - */ - public static EnvelopeBuilder newEnvelope(ShapeBuilder.Orientation orientation) { - return new EnvelopeBuilder(orientation); - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index a99517f4003..1ba49e64d80 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -184,7 +184,7 @@ public class GeoShapeFieldMapper extends FieldMapper { builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); iterator.remove(); } else if (Names.ORIENTATION.equals(fieldName)) { - builder.fieldType().setOrientation(ShapeBuilder.orientationFromString(fieldNode.toString())); + builder.fieldType().setOrientation(ShapeBuilder.Orientation.fromString(fieldNode.toString())); iterator.remove(); } else if (Names.STRATEGY.equals(fieldName)) { builder.fieldType().setStrategyName(fieldNode.toString()); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java index 1c1e37e81aa..2015f2b0bc6 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; -import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; import org.elasticsearch.test.geo.RandomShapeGenerator; import java.io.IOException; @@ -41,31 +40,26 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase 0) { int shapePosition = randomIntBetween(0, 
mutation.shapes.size() - 1); ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition); @@ -109,7 +102,6 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase } else { mutation.shape(RandomShapeGenerator.createShape(getRandom())); } -// } return mutation; } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java index fb41a2dffbb..702114a2cb8 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -38,23 +38,29 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase 0) { + int polyToChange = randomInt(mutation.polygons().size() - 1); + mutation.polygons().set(polyToChange, PolygonBuilderTests.mutatePolygonBuilder(mutation.polygons().get(polyToChange))); + } else { + mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON)); + } } return mutation; } static MultiPolygonBuilder createRandomShape() { MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values())); - int polys = randomIntBetween(1, 10); + int polys = randomIntBetween(0, 10); for (int i = 0; i < polys; i++) { PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); - pgb.orientation = mpb.orientation; mpb.polygon(pgb); } return mpb; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index 69457419727..ad8b3b817fe 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -46,8 +46,7 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase Date: Tue, 15 Dec 2015 09:46:10 +0100 Subject: [PATCH 112/322] Adding integration test checking that empty geometry collection returns zero docs --- .../geo/builders/AbstractShapeBuilderTestCase.java | 10 ++++++++-- .../elasticsearch/search/geo/GeoShapeQueryTests.java | 12 +++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 10a5070f3f8..279e31aadd4 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -23,14 +23,20 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import 
org.junit.BeforeClass;

 import java.io.IOException;

-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;

 public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
index 847e03e5c44..c0cc17fc43d 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.search.geo;

 import com.spatial4j.core.shape.Rectangle;
+
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.geo.ShapeRelation;
@@ -47,7 +48,10 @@ import static org.elasticsearch.test.geo.RandomShapeGenerator.xRandomPoint;
 import static org.elasticsearch.test.geo.RandomShapeGenerator.xRandomRectangle;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.nullValue;

 public class GeoShapeQueryTests extends ESSingleNodeTestCase {
     public void testNullShape() throws Exception {
@@ -396,6 +400,12 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
                 .setPostFilter(filter).get();
         assertSearchResponse(result);
         assertHitCount(result, 1);
+        // no shape
+        filter = QueryBuilders.geoShapeQuery("location", ShapeBuilders.newGeometryCollection());
+        result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
+                .setPostFilter(filter).get();
+        assertSearchResponse(result);
+        assertHitCount(result, 0);
     }

     public void testPointsOnly() throws Exception {

From 8a02295b9a146abaf5a632fd9718aaffeb7b2f01 Mon Sep 17 00:00:00 2001
From: Christoph Büscher
Date: Thu, 17 Dec 2015 17:39:13 +0100
Subject: [PATCH 113/322] Make sure both JTS and Spatial4J are present before
 registering shape builders, query or mapper

---
 .../geo/builders/ShapeBuilderRegistry.java | 21 ++---
 .../common/io/stream/StreamInput.java      |  2 -
 .../elasticsearch/indices/IndicesModule.java | 77 +++++++++++++++++--
 3 files changed, 84 insertions(+), 16 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java
index a900b6a9ee5..c66e969aa3a 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java
@@ -19,6 +19,7 @@

 package org.elasticsearch.common.geo.builders;

+import org.elasticsearch.common.geo.ShapesAvailability;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -29,14 +30,16 @@ public class ShapeBuilderRegistry {

     @Inject
     public ShapeBuilderRegistry(NamedWriteableRegistry namedWriteableRegistry) {
-        namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE);
-        namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE);
-
namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + } } } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index f8072804225..ffcb4201f4d 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -33,9 +33,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 3faeb6fb3df..61210bb0413 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -28,13 +28,80 @@ import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; -import org.elasticsearch.index.mapper.core.*; +import org.elasticsearch.index.mapper.core.BinaryFieldMapper; +import org.elasticsearch.index.mapper.core.BooleanFieldMapper; +import org.elasticsearch.index.mapper.core.ByteFieldMapper; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.FloatFieldMapper; +import 
org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.ShortFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; +import org.elasticsearch.index.mapper.core.TypeParsers; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.internal.*; +import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.internal.IdFieldMapper; +import org.elasticsearch.index.mapper.internal.IndexFieldMapper; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.elasticsearch.index.mapper.internal.TTLFieldMapper; +import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.BoolQueryParser; +import org.elasticsearch.index.query.BoostingQueryParser; +import org.elasticsearch.index.query.CommonTermsQueryParser; +import org.elasticsearch.index.query.ConstantScoreQueryParser; +import org.elasticsearch.index.query.DisMaxQueryParser; +import org.elasticsearch.index.query.ExistsQueryParser; +import org.elasticsearch.index.query.FieldMaskingSpanQueryParser; +import org.elasticsearch.index.query.FuzzyQueryParser; +import org.elasticsearch.index.query.GeoBoundingBoxQueryParser; +import org.elasticsearch.index.query.GeoDistanceQueryParser; +import org.elasticsearch.index.query.GeoDistanceRangeQueryParser; +import org.elasticsearch.index.query.GeoPolygonQueryParser; +import org.elasticsearch.index.query.GeoShapeQueryParser; +import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.HasChildQueryParser; +import org.elasticsearch.index.query.HasParentQueryParser; +import org.elasticsearch.index.query.IdsQueryParser; +import org.elasticsearch.index.query.IndicesQueryParser; +import org.elasticsearch.index.query.MatchAllQueryParser; +import org.elasticsearch.index.query.MatchNoneQueryParser; +import org.elasticsearch.index.query.MatchQueryParser; +import org.elasticsearch.index.query.MoreLikeThisQueryParser; +import org.elasticsearch.index.query.MultiMatchQueryParser; +import org.elasticsearch.index.query.NestedQueryParser; +import org.elasticsearch.index.query.PrefixQueryParser; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryStringQueryParser; +import org.elasticsearch.index.query.RangeQueryParser; +import org.elasticsearch.index.query.RegexpQueryParser; +import org.elasticsearch.index.query.ScriptQueryParser; +import org.elasticsearch.index.query.SimpleQueryStringParser; +import org.elasticsearch.index.query.SpanContainingQueryParser; +import org.elasticsearch.index.query.SpanFirstQueryParser; +import org.elasticsearch.index.query.SpanMultiTermQueryParser; +import 
org.elasticsearch.index.query.SpanNearQueryParser; +import org.elasticsearch.index.query.SpanNotQueryParser; +import org.elasticsearch.index.query.SpanOrQueryParser; +import org.elasticsearch.index.query.SpanTermQueryParser; +import org.elasticsearch.index.query.SpanWithinQueryParser; +import org.elasticsearch.index.query.TemplateQueryParser; +import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.index.query.TermsQueryParser; +import org.elasticsearch.index.query.TypeQueryParser; +import org.elasticsearch.index.query.WildcardQueryParser; +import org.elasticsearch.index.query.WrapperQueryParser; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; @@ -123,7 +190,7 @@ public class IndicesModule extends AbstractModule { registerQueryParser(ExistsQueryParser.class); registerQueryParser(MatchNoneQueryParser.class); - if (ShapesAvailability.JTS_AVAILABLE) { + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerQueryParser(GeoShapeQueryParser.class); } } @@ -147,7 +214,7 @@ public class IndicesModule extends AbstractModule { registerMapper(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); registerMapper(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); - if (ShapesAvailability.JTS_AVAILABLE) { + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerMapper(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); } } From c31117f1e8d0b4daa05bc528b3fc30871f85302a Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 17 Dec 2015 18:38:46 +0100 Subject: [PATCH 114/322] Added BWC indices for 2.0.2 and 2.1.1 and versions 2.0.3-SNAPSHOT and 2.1.2-SNAPSHOT --- .../main/java/org/elasticsearch/Version.java | 12 ++++++++++-- .../test/resources/indices/bwc/index-2.0.2.zip | Bin 0 -> 83456 bytes .../test/resources/indices/bwc/index-2.1.1.zip | Bin 0 -> 71994 bytes .../test/resources/indices/bwc/repo-2.0.2.zip | Bin 0 -> 81369 bytes .../test/resources/indices/bwc/repo-2.1.1.zip | Bin 0 -> 70133 bytes 5 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 core/src/test/resources/indices/bwc/index-2.0.2.zip create mode 100644 core/src/test/resources/indices/bwc/index-2.1.1.zip create mode 100644 core/src/test/resources/indices/bwc/repo-2.0.2.zip create mode 100644 core/src/test/resources/indices/bwc/repo-2.1.1.zip diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index a5e2e38ca26..b8ba0a411a9 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -268,11 +268,15 @@ public class Version { public static final int V_2_0_1_ID = 2000199; public static final Version V_2_0_1 = new Version(V_2_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_0_2_ID = 2000299; - public static final Version V_2_0_2 = new Version(V_2_0_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final Version V_2_0_2 = new Version(V_2_0_2_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final int V_2_0_3_ID = 2000399; + public static final Version V_2_0_3 = new Version(V_2_0_3_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_1_0_ID = 2010099; public static final Version 
V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
     public static final int V_2_1_1_ID = 2010199;
-    public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
+    public static final Version V_2_1_1 = new Version(V_2_1_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
+    public static final int V_2_1_2_ID = 2010299;
+    public static final Version V_2_1_2 = new Version(V_2_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
     public static final int V_2_2_0_ID = 2020099;
     public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
     public static final int V_3_0_0_ID = 3000099;
@@ -293,10 +297,14 @@ public class Version {
                 return V_3_0_0;
             case V_2_2_0_ID:
                 return V_2_2_0;
+            case V_2_1_2_ID:
+                return V_2_1_2;
             case V_2_1_1_ID:
                 return V_2_1_1;
             case V_2_1_0_ID:
                 return V_2_1_0;
+            case V_2_0_3_ID:
+                return V_2_0_3;
             case V_2_0_2_ID:
                 return V_2_0_2;
             case V_2_0_1_ID:
diff --git a/core/src/test/resources/indices/bwc/index-2.0.2.zip b/core/src/test/resources/indices/bwc/index-2.0.2.zip
new file mode 100644
index 0000000000000000000000000000000000000000..2f77405a8319782becdcd2426f70ce8c897949e4
GIT binary patch
literal 83456
[base85-encoded binary patch data omitted for the four new BWC test fixtures
listed in the diffstat above: index-2.0.2.zip, index-2.1.1.zip, repo-2.0.2.zip
and repo-2.1.1.zip]
zwC3(zm7rV^<-H0;+h$v3mIuys^c{yj=!T|+$}x z?5|t$sU{9peV<9|GxyTg-cM{-sOuCDGOnLsEBqD`B;@bV=%;#gxmhRb*TT6ftq<)$ z?FXL3HFkSr!X+BNRp#bwOBHYm;WZ`%Eyjun;1B-ndPEfJrCu9y`_@+?B8zp7*!`pr zRpurbm1Phbd_Pate_4Hz_oGRakS&1p_H2yD5g8;pVRO?0I?u6k`2#}8Sd!c#)U03F z1Jt7P6x2wEKld@lRdm}9TlJH=H)mGtE^W%p2L7sfhd&gWVWdVK(3RQHJg73r8(>Jg zvAG$l6NXW^dwXxpglsucZ)70+w9DWp=CC_`aTOP30>x%nq_ zFPb-7=r$_ooooPI1xKejAc__cBb6%#1UJjS| z&ifVai2+|$P#y#RtKRv_PU`ui!c_;Pb22MXEkK(k|G5@UQSvA{sf4-V*>236#`9Z0 zc5C;}S0q5k%6Z9Ngu~f3T%erStM?iUyTW72h@CnrOIw$8;ge+)S)L-yyBjn_MVY9# zTD4%n$2!^Wq+dQ#=0ST;qHqfxoLW@grQ#XfcRI8{71PgSA94!w+1?cU zp?wJ2IMG|4Qu-d$r2XRUqV_zTNuND_c6(ip?^;&PBj!B44Qs}hXHk)FqwVq|t>mi) zbKj!6JZATrF(k{o{K754RKbotE!$aF9rlHrYS}J2EIb?2tzHhmn_SQ{b;-^fi{lRQ zG9J{EsK04{TSds8?j={BVmIK#1_<{J0lD1(ET6Sl7}qsdTMT^w=jEE)YN2+~z+X~? zcbzV4qAQOJ$h?9Syh#psZNN~2XNV$6oq?@m^$qjsu(ETYP3)bj}6yk+! z|L$x`SWVdB?Y^weQLmA)8`~DTZN%YYom`8?%K}@>g*?*SJ~{r(jNQeISW`XtX;!|o z94_q}b#uU|_$fHibk(+k>OpTI?h%Pin8>Ly;S>3S`WFN(JKG#8b4>Ldl~nw~Hl3Z1 z{V)Oq)9Ad_Eb-klZ?Yi6=hlAR>OubTz3?1VlFtI1Y9Cp zfCzUTLi6Bgu=&=*@HXM?a)-B6~&ZOtg*v56~Kw$9mi|2Fgr8`N{MmyYlx1xY{ z!5TxmWmWJKg8)G46A@UjLOG0y9 z+vxle_~p=Dcc8&0FO=zRZ~g?_%MS+MW4%5yZJT$+XzHbzP}eQi zVb;DwzU`QEqkzvX^4E;w%am(|d_ZsB2(!mMozvMd_1(WEYpTrTCpF*qmgn<7yT4A3 zuK%Mdr6!*PwlJDqLKJ+?X9|=cF%c1we#p;G2SGu7jSwI4ulPRdK{$T^1bu@c3MNN0 z@huf-n(r2XaD4Fc{OO_SsqQ{`12lB}weJ7>`0QopWp)7gUXj|<UW z!mn5RcLd8o*v3=Cf!gw_CE!^+Lgo^;2m@;7DRO1kAG?|urue6oq~un6o*-Gia0u}s zNShTIh#EY8wBe+ghFTyzKYIaz3Q7%B{c?HaEL=rLfXeyg0rj_}LiBn2yCU@zLc0uR z6p6|cNjOmiKjILP7&5f6kH3yrwJXn=6V^|V>?G|l7|UgUfJKB^JJ5c+PtJXeA=Q3{ zg5pvA^tsKn9*UJ}y^QU%-jqE_geHr+Momkv?(+yvVum-0m_|I@pnXx)D@9h}pHnXc z49Fl9&NhhhIXNpi^Ks4jZsxBncc2S4^Cs>r*JWdn5rH}0_H{=C!ktUsQ4x>V zq)>gqtR1(=$K;@7I7Hptvo=Et#wjVGubjO?K5GqHTZUt`_sO9?#U9-88R6$*j2WY) z@hUe#3$|aifC!v%6vY(mFXq7vKPUK7$cNT(Jtyez^xHr2%=!H&klZXx=zWG&@Ah-e z+*$4#>O8hw;hEV{79$P4ue3E)!!Cb2P8nhOEclsd>eymjF$Rkr7x}f=b6jtZS)SZ~ zC?aX2xGW+leS17e%24+_A+8EhYA`gFvZ;TIMf`Dz$f};*Ye#-c%Dte3IyG&x2B2?8 zjOR3t7wnD*&5Um09J{5f389lpc6|`G2ivFg@?6QuTs=A}j6Sg)4gsHaRE$|}6`$9f z>@Ss7$Lj2ae3A*|#7EuuN&(g8b8LnZW$biQUsm-@!0Bw~)xlTnnGPsJWh#>R+S84Y zd=l&WtYPYycD6-MRIyoR0(hpgiHNlvu%Ue>2ctp1JrJYxfovE!FgAWC%$0)*Oq3^s zhryBVTK~uHKGo#i3%-HxmK>>v-~LT6d1p^c?1B^mf>-&U{cUrz{vWYxAqy^%cquiv z_g7C2CUYph+y_|e#DNJ!OfhS0jjB|}8pheiRFCvzDVf4S>}HCV%@%V)p2KX{slS6l zXtpbvP|)|yfCR-_7wvKPc0CTftqtGJocXu&=ljRE8-s$&#oddg$)};`^PhhvQ3bs2 zH$HVegMa!zOx|z)fmiy0-JR2-M z$KIts0|}eY=XC-v+i(~Dx4+(V6RLuc@?DtEG&3>S$Y6 z01RhkKs2(iR8(UovqZS&S5^v9jbiN7;@Yg4EB_OEceM@MUzBua`Whb->kbFCPyqx-eJIl0cyMj0cidSQpU=`2!0 zy>oplzIBkto-lQ9T+_JATKJr6wFTg?^o^#f(%m`7Y`)`tvAa4ZO}u4y^3=S|R*#Fu zcJ4FRY*&EBdF*AxWF)}i8 zbj;|eI59m6GFFU_NmBci;_qw901R~eXoRGw5h=A8F)@ik849u(piW9E1NJC9hc;C* zx((}p41m8L>ED4ix#GnoDNYH9O@?1UMK8`OBCio`7GpwL$W&NUTil#h%*-clE-LNF zCT$=p?MtYgP@{U`O8LsuJm+F&e=y#1q#?-AimnYMGcosY}1(T z0d$Ex!4F19eWfb<-NG}oo1HUGWW-a7Ob$k?`Fe3b^!+;9?&}_4{CmwA3 z%zbKI+=V|XoqBKM4NoifgojIC+dSgN=K7g8$qK*e#iFPi(2bF^j*W|Xi%EFPrAV5k z42%DDMgF?f|KYaNDC%pH{=JL-Kjx`42{g4cg^MvN%IF0eDbX?TEo4-5WD7Ch%A=I% zF$&27DgHJE&ohH*iSDSLq3N^fnjT%7N` zA$B9-J*wgUQYwDe1Z0eD#F!*JOyjUMNU>9H(-61b%{HszMD9`EWaiRP;VQRN=b~Y> z&Q`^ye^(^S=kDwhSFUNKf-%;Xjt$Wm3~$+G3}m3KT__Y)ON$wciW_59#!phl$4o`X zPj&w5g3$iD(tllT03A6i9le;jn1p#rx!kr|^jOpmo%~lD^@{QsrK&M8^)MZ5Om%b{ zbv-rla`lhO*aaGt;_r*#_cM_dlpI0ga6tscaTZGtmE~^|iDbJSX;Vt89 zdy$6L=}gGQ0cq?hXIx`CIGU-PbXJS`=FYKClQ-isjh*y6HF>>nJ&!*k&ja7UYZoVu z{?ned>pu;z{$I^Yvv2gk>})CnKq+WHEFCE2_VyrJ|k-k9@!*a zi#Wq;)E*I-m3r*YYpeQMy+@1!BI4vRaaq{Q7Qc=s-X00=Or)N>M$x-RbHDnmE z%$g*NE5;h&bTbA>e~RtFd1ZLgMXJZMVi_^1A4H2CKH%b{lhNa(TPZU0P0o?#q+Cfb 
zqfgeC;{3egn-X^GV7)fiP@*lHmxjZ*p-s?qYi2zxqta2&Zgku|=TWjN)08SeK+ z#r`A0&}$}7v-gocH-n$!ad{6k>IzhpGsx&-e_YsO0~+3p6^DvT#wy_CFm`CUwH#T@ zZWKli`XaU9S+J~|gbhl?D&rI}4r#@;;90(YE4r0BPM9Mop_dU&F`%(m;>~9lJocTt zPC+}YK&EK6j@wCJ>A zG*2{SG%D2*)NRzpROy=Ta0M^l?&hT{0&WD^Hg_@xHDc{?))+ms&YHQcoJV%*2OTqP zSSC$629aVxI86)|x)Ti-{s+1Y^;%X9BPI=l7_lWd;|%X|D~ap0)y-Pw4XY;c zgMio#9AAdtIz$cJHj8N3U8=7ZC>z+nwa?nPjT(NK!wAb6bjIL&OlGr(7sc{q; z1zNo%H!BHWzjten-$Ob z29)Q_vTt3sYgw^q+OUY3RLipFoVSgD4^V!!Kn=&|WV5$enP>%>vMpKUPWolRa3b0Z zm?TZoX3cY2+D)wzweV2g8N2o$6i;3pAI*#wG>PIy)}m|>b?_lM;T_FPdlRc)qTYJq z?aizeB#0VDlAwGc_Tlj}x#>UXy;SkZoV4Vuv*YE~Yi>7lT{(!ERL=_B%bax2BIM+< zcUW0(Z707ob)7!wnH0S0J1CeW&l=`bw98%fv%8r%Xm3_6izwgPIN>_IHUyn94;ohv zut$oc#1iB1PPk_rGHVIAgp;C(5=ZbLdE#v|&l#r-SVgj+n3@aI#JS>KFoTT?2mB&o zQ09p(xF_5)_7~uZM_QMJjavtZB7ITri50lB-MIIiR=48@v?ISF91@Xm^Sg0xIn8V* z4j4z0qev2el}4r^@+|k@zHk!$5jH>>IgX-9RLiU9d!90*PFK*>?;c5r!bR-Bv+nV2 z*J;T(dO#)8Tm=VVoLGaq+D&V}e$_aBKqt~2<&>C{+fH-&sD9MAX#gj(24!upZ~!GT z2c^?2zkba)VZb2LALZHHxn0f^w9Ptio~A3ha5;X$b%R&SJY-%y&YgY*z|^DakaIcS zn_DS~9fuFE1#IYbbly50AJ2`Kw8?%-?*Y8(NzmPTf)1}owi^a8Bg;@e@$tKQ0nST? zMvPPX5hK%4@_79YF2@Q&WhK)`0jheX9dQ~N2dsVO`Qx9{!2nUchz=xIe1NwTei8GW zdCG(v=$v(U|LeGZ`Z<74&!eNRh-irJ%w^@gaoD_WTp;}w(5C0mN#w*sb1Jf(IBuLy z4v^IU+OFr?{>x=x|GjgLHU~nwe(g6schE&+pLxMJS^6MARxfQVQ$_2baos$8Tqa!| zP^JIlTeZ9PVPg;MC~00a0-+vfhn>sHS>vF2)i`JRE`VB3VQfc}d*5YsKW2lMyW_8YWL41}N&~cKDrLPwcmk)2A!0XOcwY98Sam&W=4yeBykgJNjCU9d~Yw0j%UY<+9U$qX8ipPbZhaD2WkobOidbqu7-(iv5t{mn_I3?lY zcko%i{&wy>yjz=+;8%!{T zq_lg+UcLvUJNyH-b<416sXS$YBLCJWJ78I|`9afwW!W?!Z$rS>@AuQ$&B}#qUmdMW zMzPb(VWMKow09n&0IomaQSZ0*LDRZrV?RP(sz8;W#&h+R_G#0UW%o2}UVJiTxV%7? zf9u2gHP5l@*kSWDeqNKn;meXTiYm6lGtqCJ9oL2T-Wtw%m;zjW4l=;ReMb5I!^~-S zZ5YTOj!h?sRf=7UNg$3LXORl{rH*V7rf_hkmP2+Jo8s619MZMpUR4kCu+gYtkm zLs`bG=Q7Ic3Jf=e9EMVW&PKu`VJ0&&>njhMg*4qe#u|c+L&W>UOloA(R~$47srsQ5 zRu0iX^1XFM27sQ^$fA$oks0a$dIC8E*9nlXBOn*f25AZP8;%H`8_$Vm%c(CZs0i|O zuSi4dQ$4AXXPNBqKJc-guWZTh4vJpfbo$aQwhwpkaw@)l~ z9H%c^5`8y8iXRN1yHcnyF16#61~e*86H7?PMKr<~P_!{v>CHSwvJ=}$*RT%4SknWj`z=q3`ri<2w=^?k{Gz#tsjerwHNh1d`$s?ca)VMKR*!Q$Xt`adxsYMmS zvr%|3T{z8vFTc=K7{k#TS%tWmlRVc4=d4KDN=#)EV=BZNhI5aIpYmmCE0$oy50m`@e+4u4{VbUn*^b`dz)9!Z#3nv3}E?salN$ghO4k z<@b|@4_r%+)j2l~(h_NM7l;Ymz8$d{7XSJ`)OIBb*vLYko#rpGjd`4lu!L1hzpnJH z;_{2bsG=Q$Cssryk}7^JtH{3UV~O~7YCqRe{+x0NP{uM9K&P+XQQ@Wus$>;@b@iTj zo2I&{<2}e@Q#Mb27vs#O>sIrrx>Y`|no}(49pl2!XVo^38Iwv?rYlk(DvwjgFJm>> zXTmFIl`^j%LrZn1J5}c_w^LgwZq%+JEhR;4i*t0=+K(DzPd%n%Q?sa?PyyAjN}AV> z8L}#x_l$i=4X2A$!>Kw`U8!>sGLIf>)2?ikGjAV5NKK`yQq!oaR@Sa^QK~^lRuh;F z93V~2q3cxhs5(<#scIBAuNvb_J*Rt1Bc?-DBdO$9X|HjS+^-$`lKM=Spx!fpnQBY7 zr0!6@uGU_1Ew^7j#*(^wa{A*-QDA}H#K%-1U74CfRkkuum5V}&&VJXJYbuqVQhT|J z^nT+Qc4`%!qPjp@%;Hh4vIuH?E&_(g&&j#ewX#pTil{VG~At5Jsd6#`fG1DlEYjJgIP(}E$g zcou>hyebw2)2<=t_-FzkJC&{6Y(^!Ed=bYyhlSO|0(Y^qmTB)0WPB)rE&C6fx+3`1 zdIi(&A=vnMf-LsmR%eUL)QG~n;S#zAHhz`Pvb(KA#POX3_v~M-h!(j^XTRduJIs5=?%1`mnGOA;h-lpp~<%q8p@EQ~6wD9j}6 zDEvv-P+0I$2__vneejg#1CTzXD#SMAG=wlDD?~R07@`_7{~?wwq$$KT#0KmQh5?I% zHNc!;kKjoZumTtWb^w!r4Zs5605Bw2ICy{*Yz#&RD}!0U&ft$=doVFrU)fbGv2QsEp0fGI$C-r-*mh?N7`uFSVP(N%kC1jjP0A z^%PGIJ+G#F<+oB-{lkbU`5bEAMczYqz>(fUd+`z$x?l8oW4$}c5&uGaiL376*D3uR zU*3E7&8?&<)0|0OUANW~yZM#cCQ-|>DL@WWldfgLltGR^@3T9~5zzuqv8$G4&=qgY z7CjO>p4la6iarMgmKd2A-;MJ@>ku@RccpiTnS;yAIaOkng<#6iao95CDZFfs z2hu~NO5`cRb>?p-PK)_@(t$uoDp9!rF-M>PmA^b}CK4;Y-P}fTq_iT?fJ*S76-*r3 z344$9i;$n$%j$Aus{sropa?+-=vSrn54D7yMixj4Fq#`JZj#1*&rS%IWQ`(*ro-mR zEHQhT&w$N(M-71fRDqL9Kx3)_d5MfTc6@WzRkOHJEub^Ufgsx}@~%8ph8^dMN#mkf z)~E-Nn2Jx{1N6e!DgQfTf(2yDI&Ic7`T-bD6)T66)isC#ETI~gOUtTe*0ydOGiw(aNEzhaW0bNHUx 
z;sVIWn?F`G+$uf0DYYJUyP>W9_TY_je8_T3)VdvTOU!^Xpt7%3EjUpdR9oo*YfOQ+0hz>{`&96{P3T{C%zBs;()jj@A7%893a# zWWTRFhWZ`iG|gJQw_UWeQXV6^D0A>oFl_`HL^(bRZ9oOjDqJI{#D{5S9&#q9M2)#; z7P2m<#D%%{%JQ&!C>&rxDcGcvs7*Ukjb)*Mz(y|)N-dm_>3+9qO@xrM>h=D0-u6dh z0|SGNdKxZ1>fU+}XN{bCryq)^SlD}93}f__nr#N%KH@9LvDY`J@6{%w^woZ~+IMVP zcWg(4pwyiSnb3EG1#KntKCK)I56CI}%ZlU_{$;yz3jZ>ooWj3MnyQQfsG2-V`^aBL zQ{|DErLM%s)uy3QlkZS-cE$@6Zk&^y{jwV1(LF9WUX`VvVklT=pzIJ}a30d}7!ETy z=zfie7<*&$=GJP=F?lw_-XuCdfg;c5Im-Z?|CjLt=esnhMku(LCZR!9nSo50FIU$; z{Yjcfw*GsY*lPZ7ZEUOgzqP5Y=6~5AqB41V@@h@4>2w#G%GCMK@Ggyww&5@>r#OqF z=b)K+}^>Fpj01BFj3FpuJcJmHk?jfmIRH!iWRu>?3ci}X{| zfC;*h=~~sceCOBX!u~qXh1&&y&zRzE3h0OyYF4$aw9HOe@uQ*QsD|Q3MJ}Dzm%IGC z1?Ss^_|xbGEf7I(Hi7ElOz*psL(rQ=pn7{GC>1X#6CapEplWw~Q9NW=HgvCQ8pHou zwD6EnafLd&ycO$Zz5I%5C^VQ}W+RblCn~LYa;SQ;iOl$_Aloa;S4T=vTsDoX>SzWHl&bnglE^;``l(}v0QoIVB92K4nDN@jYRPp*1qF5Ue6 z(%gL8{QUjge8SxP^xS;i{QSk7>WTNk%JJ*jZGeZ5AYXT0&gQhjg=N`&)BN*)zkIhF zhvDtuqo`S2cKf3?Jy*q3>ALKNoqQtYGIQhMHIMm6$TUp*&nPV1wF(3mia_@jZzLPBDg&|$~wf=evVp!hq!O#fcOBaf!eZcCBcKEf`I18 z`oIZ62npZE38foU1Mwlp9?k=T50Yk}G+4jS0RlYz8^Sl(AA-WPsSk4Nb5K|x3Y26} z5k#}ua4=?{G=#LtWo_dw8}1RHarbDg-41qHqR?7SRdt?4K0~He0hiO=rjbKhzJu+~ z+Qv$!L^@9vn8|6qS}JWAW5IsDP@H1PdJ)+y>o-TOtp z4f%3={=sxvn$tNMOH~B>q=4XV76N&W)n>+pp!C^la1zeZO8(nc#>rwj4w1v{M;%$z znb79164r4)=DFo(DCDOLC+wxu@?=MuXV4 zb=fnTRXPKVeQ(!aJV*-~JYS2GnmvF+Gr$!I%_8R(-LEazV6A&^_+^f?w~Q5eF~HGF zQ8*P_t#a96>*nK6)UfHRN5uoZLLwHMA`cxgVkdW8#!`nd)nK{CEw4zu>#YD`0T>F= zZE=IG4rjBpFPfP>_*maAyfTIjdlUv~l1lpOD7py7lB>E3F~x<^Op6oAR1)i@DCtB; zuv9ciEtvMIMP~w}I-m;y;nku#%QLs(w?xBC(6ON*;vig5lL>)hmuXKFC5VQ252_&S^Lrl&TqrP&DePrhSTKMv##i zO{_{q-3Snwf#j8nJYps;C~}MYI{%SZx~!0y=_Z(2A3#)a-6G|*9l?Kb?c%pjOvfwE zrSajTVtM0+AxaT5&A_(tlG)p2yK~Yj8}`BAG_>IY9hK_CLAP$@klhUBY-06!5qDd(8x9!g#A0t@9wh4mKL;Z__zC@F!85hdu4}yNWAfwWJmS5rKLj68;a3Pp4 z8?Tn)b%sP~-m9oCcJ>=~QLDY;6PAiHFo~Sex>T4h5tG>ZH*_>jr+M8`Oh_XX-?l-| z_X{wT*)HiX5Fz>*nh+A-%LFSb1M)dKq;1PNkv)!%_W>&0YxRVpDpP>W&zkDUkBVN7 zSiaFT-meYK^gXRiS9AGfrd~x+OhYr#)lunPeU;EL&*2!WDu&wX?x?wBgbA!`ep*#` z$@K#RYY}N=wk?gB+8(7G(`TBXbBuu56W*bPPp-EMt2!zDtuco2Da5N^RB&Pp&S-Tr zDU%+$m8}-l0Py{*5j}Vv z#{1<-i)kk(WBy;RA7MI-TTVD8aScQ&vviBd!`w(q9!+HIe({d4t~Rx#f3=G@y*p_o z@_n5C0ogs&a5PMm^;~(rO7pwZQF_i^s;=qWaz#?`b&2J>+3@k8qn_<}%|q(Oo3p;qejYATK=wKNGY}8K#qgx6GBv@{_~DB(pVV zT!moF_~kikTm)mtyCZL$Xo_cT!W{O{j%Ugo;c(*kfHld?&~eC*aKyT2!kl4rq-Xv8 zbQ6o}ZwCePQ9HE5QWn)q69v)H!82Jf>yM3W*yJuf1R*Fq&<}qLuEYo#X2Eq-7nBrN z7mg7grKzFg&xb-hY~ZP27lO^uY^ZJK?r+F{FK=_t@SNFBf-bU{SHu86|(;kQ7bOE;_g4&*E_waT73AoTBcF*XVo?c2ngHvXG`dR*FD?w3^?yQj73m5#kkLjE%aA9a`xHZFm}rQU z$`mvvS$G_&GvJzZbS1BmE-*`yZ=WCp9J=3B|GU67GDKJfuyy zC@O#^*&zg!?{)uz7_l@8(hS;I9m++m@D;VTb)k##ZD+O0l@Tp0&>TybILT zGi4-vRCU`+S~6Q6W3(ZQUs<){|BURvJ#UTTUUc96IZ|hqwLooHNtct?v>A*j{1Im| z$SgaV*(g(N6p0sNhP5{e)w*40DfkA*eLmmx8mKy95b^l6v<}aa-lF87Oa$$-q?f$Y z=%W5yN7qQ#w|2RtBBYh~cBa*~Lp^j6A?BjC$Liq-7re;OV+ZHD2oz6Er0f**9pRL= z5i3>zLh(YEL83a{oW8W1EQ{uoyCJ%iZ!T5JqQejjDS~QlMFkuzE2W45nbMQCi{scW zB@g+}Q?7SC1=e(tMnS@^k_*q}1VpX9q8wQ2HSKcQ6tC!6k)7zu1(Uq2LKD)5C)4Z(^9!O=XN zKPu)9ZolMEbnB1$6&ffbn=>T2C9vYlnvVwgTbXSAtgUd4_FLa4G%lqOY8a2rvYYE>eM6&8D?A!ZtW{JStSIW9;FiTikIIdwO0FNgWCWTROT#Wp1 z_3QVAeP3-WTnPrMQ&1>m(0aXNn{^y<<=U}iw!rlD6wtGz=m^QLfc#OtF8xpO@2DKjmXk?JedRQ)WKDm}=3xvq zBwQxOBx|A|JVEg(9bdc`(N7T%eF+W61Mpw2jl(V;nLAqj@V@ZHB>@RNw>2-RAK=!F zBUbNLt*#E1ky3wI(|_@!|5RsnE3&fzHO0c?_bluc7l%<-Xbk9sgJHq?n(b9l)k#gj zxAN>__|{kPur+h62@wYOnk4J9s1tbp%Tr@?{@2~3#3Fyl{O+^kPzbjfA0abmq>8qK z2toRc8IHU{kz34o@rb4cg1KwP_ez>sk7=d=`-pP$i$ThK@t|dYD@r(x)RpiIb@q0c zuj(;g^G7!m7kdooO;iTH3MLp7q!DeQJY0ILWRp=y5op)8w-g0I@Uc$O3pQTJ;NYJ| 
z+>nS2WJ*sdkpiw8r7+Svcwk&2k+U*r!ziILU=2#Q0!m$+a7fSGt$K`*;;CWT6-3C_ z`;Vavqg_AG3ncm73=%Oo2XUcSomtz-`N1WvPWdb-LqyZ&u3hK3GyaYCYiuE(3(K^y zSjZeoH?hQT6BA6=B)>M0ilIimw>@O?N0Z39?Hm}vM=6jb?Juy{CqFXa4q1z{0e$W( zssUteJItzliJDbFoh4QdZ{o=c2lzVn4;KaMUj-QYH@?2U{a4`PhB+Jy$~TFq3_z-Zm^Y(v_+QoE8^yXC7rIhj$VlG`zwc$M!5qF6hP)JhPo8+; zCn?|D2oLtT9Y|O0eCKj=ZGV}CPW^l@* z))h(}8OkKTwv%597qHqU7P3=J+FlCRzhiW-zoe)EsF$`9AGRPgHPfL8fQHOM;4zbk z=zoKnAMTfq00Yg!m(5J1~uZl4I~7@{#(R6=^om_ zFc1){?=m6(OE&X2V*fKZYwAboieu%1_@o^?Fu9)Re+6<`lvr1EQgOAnwQiswsQzJ7WVIi_DHqR_@kRW8_~iTha(7Bp$kI~!qJ!C7NEk2Uz)=R z>n8dfY=UloDR%ff6#(~gFQz~|t_Kr5mI^3;^iFQ>GD!_iH7Z=6kR@d_9@ScH)NN);s0)gW78TPb6mC7GWaEbNi*%~6 z#>;|;qld}sBKJa4KWUhZrUYByx|W;}5h$ndEYuOwWU3$)+dNz%*s!6I%obb(!A_JH zPLp!PQy(g=<-sFUeJu#yV_CoyjEK0!#=?^>cV$-574}s<#>P@NjdWT=5Fx*64i3Rp zN~z(*G8C-|C6J5njV+Rf5FH~xcvo#KfOJQ8uZXONjH&n-*Smhdc@foiI2Cw9%k~ps z+!L1?NT*quwP^9f0iC?5;RO_}&%DF`5*H235n3;FgsJ=UUF<&QV}yE5a80G6W(-Hu z2gkK2>L??zts`dAOPHvF0tr_p4{P~9?i&KR^>k%bl9M{MGZVIca;+N%KtiWulxEJh zJuXN8=I<1q@T0A^^69wtuUwt#v8Y6)2BTSa*TkEN)VjX=anZ)2g|SEk7K3S&XRI)E zhc-pAoJoT&lM(8tv9W`cL`5q;4MHA5_1XR_)oQUC+9CFwVHfC41Ya9lZcBFwe43<+ zpnb!veDZvL*1@iwJL)38#vk4DkJz;G@{DRMJb$J?ad#;}mF7c(ZdZ}sgaiklk?mTH zx6fNF_ChHiC~#D1k{MQyXCTXN(kHdEmPVSOlEoMAvg@buBy-<6uY!*q6e4+Y_HLoe zyvM-YB^e!!1Q*!XNm5hzOoN;9F=l2zy}}fwoqRIpSIi?x4ljm>iFq%S3i{&0j$kX; zOxznDCQ7kt4LUgo&Q^Jp8?TuFYI3gNur=cjJ?8Wo9G-P^Wls6bOv8HjzGS!1%N9=N zm$oeC4_sNyA(mOpBDh&BwOq!x`VRegHq{0AgIt$z4+wuq-l&k=hwY1wJU5n>&%hS$ zYO1ykx&#_{rf`=m&f`d5a}b$9=cj=EQuA!zlU_?pg_X8nPvqlRH!aWH4S#B6QFOm+ z$aa=8?G6KsFcN@Xncv+BmLyHZl%ooFp{2{&EBbs{#ghrtIf5z`R1l=CDN=_B7K3E9 zjioq@WbA5lDLMpVW}i@n34Pb|CZgYKk4U(MppBs^QL!qK29hv#8lL@rcTYZ6Hp*Sq1`{1!dSdm1| zBh8{;MZ5?wV(KMBzMwP>Gx!o&COig|dlTE*Q9#q@fzd@h+)p&QFYOJj?myxErZbUs zG0sEu{C4SpOX9IDjVrL-@69t5f9_rjWfO!_vm*z#?dbV{`@28bWF~Gy2zu1TcO8X; zcxODC4qfSZW+?xt`)i(<2Ob(9XBXLrat2Qzp^9RHTn4lL{qv~(8~TcK;ICn$jx0#+ zp$XTOgpoZvv`sFby3G7AJtWA;_UyJiJ=AdXGY{@(KmM9Mm%ITht;Nj-)IAiigT1uqCdO@X> z@;_)!rMZ*+>Lvca#f!9k7nk8Z2OTLB6lb0F>LWf8RRf1y;Aq7u%0FAp?&x;kPpvz1cFx~Y&SXtYfqVrHvVUg#PH8`Z3D)jqs%$tXVO~?Z zdyLyMTSNw{K6k8QE-q-AUHz053~>1~u{Q5mNJ-}}B}LeJ%8`k?fnBx@Lh(N-nWHWq z=Y-03Vj>Y}LHG%1Tk{GlBIb@@vhw7%}pw^bpt~;(Ufo4d81w4RBRhLzA!y+pn?D!MT`vI<#I+M99R>}0 zH{otn6J{OhSw(~aQ*niXDosyu3S2LaqAQ>?yMwWMou(>P*Qb7dK(;uOx5BuPWWqF| z6>mDn%mT}X5UeSa5fSp1drHrWa*$yWxT7x9paf?L8Uy~r{f9HqQZjpmv^X?aSuSu1`WTTHH>!B`YiGjmjU)rt+m_b0n8d7>2& zBefF`DAf(JI1Dp4bE~d~;z?WwAelPpG8?cHeA=46T6*y9iigr7kznEVb+r3xtvFj}o(O3tR+t3(sEApD_^4Z$I$w9Y_KAil~dGN&(qh%qA0W_$*K82iW>?Dt~UjQ|CC* zZQqDC-@pE>4V5Sg)>?%Tmd+HTC(Ie(+_M}YppJs+%ivys)E*Gm!%!suwDX63s_X+l z{uld)=E_rwPiX!~3dMUsuZ*8tj6O)nABl|x>FH2@)bCQud)?)g;S3Jbi65z}D?X1MsztPhoKuG41*ILh{scsDrY>6W4s~+!- zpc&(G_-1|a+!cXX#gd;;QdaZIub6@Sn4pxl9WZAZHtUpt39%MRC4b!BL5! z!$zz?;L@Mdr+-6K#@dR`e+W^n{y{`7`d8{;HjYT{ageAddKBRg7y*4BKL+ zT1*WzJfSuH{S2EEVKv;n%``lz9E@en+@XjxaOx8dR{54z<(4Db$)GG9z2pqtK3gjj z8zVahD{BiQGmDW4lDw3H^z1Z*lGqg?H)pCM{`CRMNAU_;2T%nM?V!V>czZOZPx~Wn zg`Pq!nDhz^Sz8PEix5UZAJ%@lYC!!PLQI_r^Zi3Rt({Z#dHe+Z<~BXnYxaR_04$Pb!KgzMOX>$>+pQr`n6 zx-s0R)JFZDB>1oN?|9wxpb{1W!VBYnojLd$u>ToSwG8dB#PRY#8=b#47`x{L=jKEb z(o~9Xm%G>2I>|FhO0a|z?X6;7W^az(^?{C3JL$psbBSmq3!k?_XW?rE9`dPV{AS@y z{aBSr;yI|qFh$@Mp((CDzF2r1@-hX$tOA5`0lIwXkzVlrsS+>+-!#X zbUlDdxO3P&Pw=sZ?EN5&Zpp(7q`S0xy@j>CzW;#gI6+n#S6j8#3c=-Pe%(KcanXA- z@ZiRD2APD5C4+Cl>n>#6i}LoWYgyx{y}VlW=fa&%#mij0dLIA*VaiA42a&od(OP|_ zK(BFq25V#axq-}TNh@q*TQROE67mjdGIR{-#BwT0`OlJ=%bz0@kbrp3CKS?g>85Up z)-!al`V-vIgcJ_RUU~{aM#)jhVk!z00|W|V(lXK#duCy+5}&lpen(oe2z$*6KJQ!l z!~3GcHqj#)A;6npfxbMUQ&9xf&!gLXgk>$wyMn}&2oq38(qsV`W92`eP8sp!jP6&? 
z4ZesOFaIu^660a!j&km!%yL$D+b+eo zfnm-HGusn3Pz%S}6Ai$+d~gyZp@?CaG}Z@kZEa$ZDvCxk8XV!KTkrvGXd)sOiXYK~_ovpY$_*K zaYF3KYzs!)s2%oS!1-`H7lLu?-@KCoWjsNNcIX;8MCptlYf2V*b?)YD_-Yt*kmqco zG<^HLX2bg&-#DDSnHVZXz7VJ3;$Wwx1MYeY1|rB*P8G#2;k5e%AT<%WX|@O;+u*(8 zgYt0d0bA!EHfguI%>$iqWB;G#&O9Ef_6y*H8CgRiWGh>?F=XHO>{}_y&R`H>gpi_; zt?Wy7vV{^Or7X$5M@2+ZNsB0Z((ew{JLB!Gsqx1zUCYPEJ>PrIdA4)TbDtrJeN2#( z<{gxS&qX?k8(m~wLgZibHwYM`5Lb79&S9eT5qQ#sq#xE{>W!r#v4~0_G-?^<%$-?! zKCX1Y=4|r?0qTRR;Sbcyw5|>t?=8=AT=IXBk$Ny4HqlIOQukKLxJgvGSHZAtnaL-E zjjOX-r?uo3H*v0Dd zo2TG2=a$1=1^L5Ixk?FAMaWX*pL+Vpcn^G5{AFeE^ruUoWkshd{g|bX(E2N8ES`+0 zOXEk#U6(Ji)Jw~Ldr(3`W=K2{g?MebPcCM~n@WV`I;T0;bj>T_0Y5s1hPQMKVw3!^ zH*SZmM9oU-Ul-;*Tavr{Oy}IC5Lcn|IKCWB@0{ zv61GR($~gb+v#ySuJ7D1SO;&^W~0^NI5+Z~m209#g}` z7gvLFm+HpACvNavRJxwZnmsaS@+!=HPA;cU5&_obYIk{(fwKRO+yDAHpOzzQUBc#KjGpGW%GRD^N&iiIopaooXz|k zT>^df^QkbB8i(U~+ooCewhsh$21xNV^0~!mu^l=Ww{Y(!W%lHDrOX~gOGD3gam3tw z&g#X)uQT)sbx1wpBeS+ql9kK-Z3_qd@05Qe4{)@yRCv~|_85N&`|oxT$@weLGGTF8F!T& z)&jmRIy+bdEZ+L`T2h0J_5oeZS{1@8xu#yWA|@+##{*Q5PI+W&Wx|yrz-#t_~_bBs#c-e_-p%&%u zR{SEi+eUPnp46|}?Q)*qbwcP!TyX)tQsiC%O>fPmQr3_=8TAGqcwm)pNh#z=wd&IA zHRLp`?50VZ$<>`H14HUsyk|m41{!@L%VM8Tn)zkUeDT_?9G|S}{!qn@al%{^K3MkH z$r3r{ew5Xdy=jG$u1!vpyRf7%HABl`;T;RZND|8vh1J2cEmZQtBGTfgNwl9SXY#xD zC1s|ayrniYO?j>I&FJ<8KRagy|1kNClIL?^DQUIn1P%9adhJm1Jkb;~0nQU2UET|bb-KgB=KqfE|*~~iFe(Qh_lHPQ;AP& zUmpyAD`l0bcfU6Y%r#}ySY{i%QJF5;i$_&ACx%)`f9rO<;Qk%cH`o zE?6w2MW2nPEuavE zxzlEZu-T5lR|`JgavoV4*LBb~+I3;q_Uj!dS_qQFPnRGGRs<0>!DK}mX~apWFtZ^f zyZ`6@9?Rt~O6;F$*MOM4=txH*>s!truCzUt%hRp~yo5Iw+(DQ+-8-Frw6~|OmVRwa z_T_=0PM^9XtZc+C=MB3?!iXOECv_Mw&k*+?uipr!5>RCw>f-|1Yr* z7sqaN?BmMc#Xf?TXJzsTuvOnH*;Kei$pq&u9x2cMtD=z%UC~&o#HXE=cGqA<5U8W5 zTCbxp3xiGY?JGUAQ6J$T4M{jcFa&;he|5W)BW-WKY$1EZw)k#!6t4o~-tKb7VuVzx zuECcyLxV>0J~Og;)*eLRngQ){(5g)s>>A-A`2}+9D8<&kd#CChM4Rq&^YSmMbRPP&%`CU2APkB04Y)i6?{zHPYEgQVE~y_a+O zY{*kFGN+iFy>uwL9ZpIbYJA1jFGJ3qayE7+cF!YZ%4VkCVMIl*QJ4e+21|3EQSKy- zbi2j7DH0#Lfd76K8B1#Kqw9=?c*WqV{wl{zAHG3-1j-=@A51SKO4l{oehoPBHR7d2 z&)tu#$?O%M2j3mc&*y)cQk;_*m6BuK{iwa2sV}HzfSNPcDpK~DgwKbjw}Au&f={J{ zj4rcuz8X%gwON9GFfPp)UWqMUSZQ!Cd8b}cyPf740a<{|GUE~tK|v2yMxUtH2?0~y z;t&r3sX`}I1w^G^SA_dYo1*E}Ov41|jN7CJlq;5?ao{SdLRB`ooAUHXcM|GB@AYK5 z@c~(%rvqcUqcgU}iRSZFd&sc-9ju%KB=89y2 zPWO_AW*}dScqF?=?n$}RSTEUM5QBO$%O28TPR`=Ji*Q*g6#9zgR8NjeHe&lTmM8r> zDS{%p=Y_8YmZ_~26b0VqHZGl83o6@3P9~*M%kyA@vADS_?I?Rc>NossQk;Aa`r~!-yH@CAn47HQ*52_w$CIUn;mBe6ON=Ya#Cw zk+Q1^bOu^e)$w`P+UZJ7L2r?c!#lbk@ul{)FS9f6Z8LtX9ezFrB4YqkIBLvZMCU>> z-xlsV6f4%qxoqD_-Kw_VxSXU1T_-xgpYVo>);-%oFqa6{xl10X6D@uyc|-THk3zpk zTqVkD%oploRz^BC2F@3Y$$8VFQbjMnV(WNh;jQ4zs_lx&ur5sm;-6tQ{ zsC=Jd;&;H|$ZBuJ(@?(*?YV_{DiFzdEo-H1J-O#Z!^b>hn#&(@i=5gVcG~Ku+YjDG73>uB?)Co*cY1lQ;5&uEfO4>0G+GD&uh~jRoNO6@bywWLI;wx(s#pHGJWU+D43CXh@mWQZwBMg$g z6v`Ol>^ot$BFSJ7SyIUd9dPv-ki#>==j^p$Daq@U9GWM22o;xB$(LJJJ8pzpqI@lM zMAT{RLP>>Sugu%QpYG`9ycWAG@Wk%INS8>C(Du&0$2Jt=VgrFG)dLMrf||=t5wysy zo(ol&8c#B>-_A#_7zR}zVZ7_4c4_q4{SLYA@m7r&;2DAE$1NL7B}T(0a~_9>`_$&}EZwJwN|hcw95}v#L%{uof$@>v)*yK&F9< z-SKNK=SYU?0$j!<80%<0&vLodEwo*`@OAU(V!wOm7OjDuw{T$J`D0+Wq5pqb16QrD zfgfq>X~T*7no1iqE@VkQ4 zZo`d2RHIIy3iY-=x2YpX#KVjCb|1{SyT5X0icUXc{h61dL_`zH?7`_$^1KN&y5^Fh z`Unpvi{he&zp<%`7i#002x7T#0tCCWUt&O`W~hq^-UG;C;gV2L*{L%yfdo`6(mK zJ?|)UAUVRBset_o4IoG~VSf1f=uQS`2?VMJCZg-#eEXz=kIuY35J))G#m&}vU1csn z{Vw|2RKts&ifsr-&w??9%l@5kq=&Vur;D3|(60=IEw#n-Ejd3R8hz>FDfPQ}L18Iz zC=%fKf#LJ-hgW#)vWw*VUjV`ZHwDofBH&VnPO4%?p1!dCYA#yFCW0m&O4w6Y4}^0I zT5ISkXpJo*hT1y+v$N|bu(nnpqT|G1$&C#`KX(|(zhSZTL|P-^8}>qb$WJQ0|9N_} zi-vxsH5CTdO7_5|jEBT^xq`w_PvoyW6kTQB4WwXAV%tFkkp`)4T5%EcM~>KAXgQrP 
zi^({f(RZ!t#z4;V&eb~{f>xB>%a9`9_f17yTq(X&O?BL76W(y`y270ZqH8JuB`h(d zCNnWy2HI!xF^1iGp@psdUAkI=;XV2hLRx~YZ8~}aCt=CqP~)^1lX#p9?VRAZf|F0Wo3t0u91@{&jP)uXA5L> zsHt9tm5_aso?@#)7$+w-^9E3vm&VE8c46ZQSTTEN@I2*BwNvct$e{{*0V>lj<{c8w z?RG^=QnPz>wHe~8xziPVM~4HXogF6PpdX;Q>gkE;1W|@$S=U#-W~Pig9JA50w{drx zy*5@C$kv;@O2`yV5vU^1Pk_;%pf)vW?%x^9)y)p0%YXyUfWH3KWe^au5^Xr(W(g1w z1JESU|ENLlmC#@9tdZ7$Z$Usre|Rf8tXnC3zm<)*ttG%h6ECCb076982@zc)vA5GueWJ^vg5z;_YT-Wqw^=v!?Oz1}=2MqDk z29B15?#@U+Xi3eIc4bz;J*$CB=!YZ_DDP}Cj*=W)+-$5}1Vz_d_pgNhyeH%OAr$RM z)DF~#EMIh&tX z8cQ&+e)t=~nEv#i55D0~X!#mSxsKL=cliUC>JRy7f1&_(fBeORf0Ms4guuQ#*?*F3 zIN9bYj?@nlw3EI4JCb#SY&7AYjM@g;|1>PL_~EJqz2M^@5Go!Wy!qZ)q7`8Q6tnz= zA~wWZ+Q6aqcEC6av@{$jl+2?RZ2?ojfeV}B7!q#^A6zL)*V3ft00L*g1+gh4z3`;~ z%FuDt#VZHXFbRM{=3lzVKZz%WEBc^f+$>U6qF$&85FGs%#Sw43DR9<>v^RCO8qmQ% z11<$D?_~19lLB2~kDDs?1*x0ZDS|+;Kr_+Cf*AD055iK%%NFhm7lT>CpmuJyxB_9H zV~yEn8*XnQ5bn-`eHIrcP;~oN0^#g8*oQ}90=af>9nktwQn>jL_K7c;Kw5oU2ef|v z3vNKzuLHpZs{OQ;K)@0bHz4fyNMHidFW?3A?X42HQDDEE0h8juBHk20W&<}0>^;+% z6ymFRQsCb0VQ(JBgt+kqFNkkjiE&c}_J&+ciWh5mQ{Ze6?7dx>6l@Rzd}c`-JH2pI z1@LEs1@Wz7 z9yfzv7oKBMuu|bofwMudOZzY>lDY7$(F$Zag5c@?pO+%FlyqDp;$k1s*&|Ftbbm$R7q^WPgC512CTZ{tD!K0_Qg| zn14b**!d<*qfFsR@Ervs`}8a8Y{bXVVIk~<5+>4RUi_5u9cd%I^edX5`-&u%iFo+_ zg60onh1lsOOkJelAssW>^m|9$oY&y~p#^lPPF>(PTENJo0J46X_<#W6J`hL*_)uj8 Lf%1Vnf1Nqm16&EY^Zx{dL1POu*VyNe+N3Wy^3jz*yWv`6;k2$%* zgMdOjf`WiRp#Jk9@IM`n_CJT~yBO%0SsNO;>M+qU(lP!Y!r}a9aMm`4Mh^c?P`v-( zBHsTVl;PiDVgD=FDgFcO@Ba!`$H7t0(dggN{}t5#B(VPqFbv4Q(2;r@p7j5TdCNaW z|KCJ6v9!_Gv!rFAb8!5(8T?O(v|`64`uS0VN#r4PdOE3Tf5#|?Iqnxe(k+!KX$G0u zn?pMo80XRMRY}8J+3iaH5$NR;SSCgjzJb(yf#DK3V=2Nhxkui{`nu0%mb^ZaieMVx zf8$1pl_@+fHs~+|6{_>Y-1~z35r*e6GbbI*M3{E0mv7rPX`lLtlBOd?dr-OYHFP9l zeqATv$7q=?_w}PwIdnfV)L&|T_9u!AoTBh72&w(j4};(7Kt&P6Gz6Mu)LM$`h-!7! zJ=PFR{1!ihu&ZFi$Z?VhUa_n{Tqr@C{ z_*P0x{TXo}dYhxAB1%kGyWlpf4u9O_;G6p|ORk{Se{2{nyA)p8c}jXyJp^8j-1gp& z>s=Gxrg=)@J6#*-2lVe2EFgI`m_9W+Se_o5hky~gzY;{Im1zEkRfo_F(Lee@vj;Pi z-ToQjEh+(M;`0Xme>L(S^N;kun#zB4V`lI_)BC^7!~Y1+_}_*9H+j{~ft zUW5HF_4+>+EzbWl2mdW~{+pGI{GXVr>785&I)Q+s=7E6l|KCvR7;w-T8~%G~!}|YD zSFca#47A19;ppj9zT=Hn6YBBI&2}NV)xpg}nQQ_6w3o=gs1_`nRgC)8C81h!ax|30 zgiR^>`m$1aL)xIdTmGc_0zYetJ^|C3ckf=Gq}x8))g{jt3l3TLJlDk;%-noFt}4^) zQS0A5BuG-&?F8JL&a5ZQvl>YJ}ge7Pa(2h zWuHPuU!Fx&_evs--bwiHS60^(noMR}?G{^ZY2KBscFUyu(O!0wy|T38rFLXrkAp`z z#<{oQtkBo$U+0;rlEVaF|J=&fj>G>NOv6#3!}BStKh(u_t?n%b=ADjOdKzst1fPKtT!u;;PSls3GH*8(ke#4ZT=CeuRBm;5XJj8LxMCEEoa68 z*NvRwvEdy;z#;MKK`If9ZMtG}wb6fjY!J0e*A^;EHfodfRs`n!~v+i-5mD zHV8?P{w|e=cUJn)FUlLqrc{#!dW`I;9TFrsT$Z2}UOaF&8<7uTKqgXq2DcRxscFH7L8pFvywC$()FTN=KlB|+*YaZM-+Z>ki0Wk{K!8Gxu$e&a!CE`V)_wOk6Mjci4-*W zD~OL)pm7t)2cRw>~Q3L#62I+i%>gH^yB!Gn!H`{dLmGW-dVct`fXc8yrtyH1LDwCDJRJy ze>t_iaLDZXaq}m!5j!L$F;&7PLUOu*z1nmwTait{&Svgqbpko4=!T&5sY*ijgLx2s z`Bcb^OYo9IQ)%Sh~)mw-Mlx@zEwjE#}B#Tb+uHA;^D{>;J z`1@i#K7hp23li{S9=1;8ZRG9PP0_g(Xuc&wteW$WZKVCC5OV-#XQ1!oJYW|AnC=RHEn zDz;d^_L?Hd6r#2ol56F(Z_OyZQ$YJ4cq{CaC;~h}48KsUvj@+5d&vk&-$sMWh>;yD z-EaI+C20Y}4Kec&2Jfce0=FHPhbdbzd=nk5Wopw;iybc{nfHoDS3@;W_q~{+B`~cx;0r$Ik1dk+n*=@7( zhGvO*{i$pvN%MsrJ^S|=uCV)HnQA}uxb5c|jLoXCt6>;?bNAkQslCltyC?+)3`Ft% znD_z}aX$CW;IzRQvF6(jGZslZMf$-+t-Y# zY&i2{s19o%2gqa&#_)}@gL?ZUZ2MkB^5FMpBqF=brfUEetEfGs`GG6JC^j`(E}%j( z<;CyDte~iuTZZnIIpr5-t(bE2lbiHgndqI&I33l32pQnucDcQvNbEBLJM`U9X2!Q@ zyzCyOd<3ne8t=2Tw}aA?3$t~TnMF$2Id0{hj!?D=LwdFH#F>(O1*72*>KE^%TAt!@ znP(EQO+!jWbK2vFrO=05V*EN*_iUYL*}@LmbRLEe=&QRZ-Y?lK|8;MJls7*?uZIxjTi51}WyKfvb%yZr_0m1?w7+o2tXAAg?V@(WCEE7l zgUHb;Xq=*jS(RUY ztn33~VV-E$%yHb+b?$Z*lwDg_#nJiOd-n)d`FV89n`%uMv`Pmx*zS5s`mMP8Jg{>Q 
z&OxK%5sW2ydp3RWvT}#MV+Rn!&ewfBZ7VnVK`jq^=uSim$rM-HfgCl`bk=?mfc-U$ znk~UUU!VnM@l_*MT&eDPTr(qFm19a;cXCoaq+xGed3qQ4%AL9%3SFB)&?~aKR5mv5 zvwkTXGqT>EMd3+a1swB<>fG^v^GyrN$`h@)4}S{@e49-(!PxSeqUeVp0VZ%onA$DP zBTV+pw&}4e!#)feF9ri>yep`D=Elum?Bh9AJneQzzdkG`x$BdEwN++!I-ad|IAUW_Db8CS&g`4aKRTDLOM)sNXQ%pdii`FOFi8P^l#nLNO z+)}0oYa^TN6W4h6gX&BP=XI7c+pYaFq8Rgwz#@|S;v+=(SSIuyC^)Co2K&&2nW45N zwk&Y~t?)1?oy*Q!_xe!6!q~M|My9(UB@oCn2LDTEIK^Z{7L7uTeyIOHWz+Q8C`oC1bJK5EaEM{zqMr@zAJDH zDq<=<2a3ZYBHiXlfo9!nzj#a22?8Ru2N+Xqon?cesmQ=!geFKhacEz!xP1P`BH|<$ z5$MxxmAVa{Gt{-n>I6pB50?eV8T=*IW-lYiu|UXFW$)o6FRK$}@l0PbRC!3t){^|i zZOXOm`=xEDy;S|iS)@&?ZfN`GHohq2>lQ_t-eVB^4_t&G6B@gwsDh0caheLI4=UNc z;asEZnRrE(cZJ7GH-R4Tus02H>=pQv8Wc7D#{5+WA)`^`nW4t})&a-SspQDBprx9L zQT9=Ri>zo_7HN?KJ1_m@2LeaK7J4TDwiw6zkBYYC%HstwnRoH)uCU__BA55~k0XR- z#6OdR^LE7-`bRmw}A&F1wF3sTlmDBbgTCB zUOo*#Z6xKR$?9d*vmR_tB~wiC-Q0*U!10zq&6{UxU>_iSQeO_Q+<|KO;cezL3+Yoc zU{-Xoa02Jd`|UJ1p1gNLIUqVbe;y$>IA4TIJ2Q)y=0yZ@EwC71KyrK%v2CDMpRjd9 zLuYMud+)qbM;gY6sju0gPZ^4Qg`}cxC31~8eUw3VuZQk#ZG-cDDZ|tOanDcmQIW<0 zk4RZDnyYInWwTz{OE)>Blv8%d=pU708fp+o5W5{-|79z`!+F;DOVI zv85Z&nxAlEZyC!s0gSw0-ecvr0MA*eI zm|3r?n&poL#41Jj_iUVwW1YHlnmGXywa&0`r4X<>5Qz3{irc5TOogNMO0=~zE>q5~ zxk$H^a);dFaF+{)Gl#P^-$$=sH>iC0PY-~PPyI3#>CP7oXaV5gV9Pb$&kV$&I%Qv55533!2F&ZY1R>AWi{5!Z`QbQv(c=0F|5e4 zwZ2OVUv3WtG-NE&HApx_i!Q$lRxY7~?nW*NRbHRAu0=Bm(0?v}3`U~d)=Jh;PmkCY ztH$w{s^Y1E)Y_PFtj#CtRx!Aqphm<59EkfoO*9adp4z8KOJSSesBze3kdTNMRTrZtxWbF~gh zzAha%3+{6%QODRT+%FA=k>bn?xMkh=;upAHt=toa)hOf2hm`J3Bbz#l3t3V!lbb&L zv0<7?wA?Tsknk)%b@|K1;!blaCo*>j9f$b14*nwbyI#K=jNT(j5Zfy$4=iSq%Y6(y zWaN^_o|eDu0h353m~joEb5UNQS6>f%T166FgYRGM@Vha6amfnefWD6N52?(>-xY4x zP7!LY4G@(|)I%uFyEeuC^Tzb40;Z6Cg8Oe;$x@lnTk4ITJYP>pga`;?!tB?ZaceS8 z5?$%fgK6o;{)5hMRN^yW>3nn4TXRUf)=$rvFk zC#ej82H+q4$i}gk8HFi3yQyGbc9s_cK8xF<305Ju;tLmOl$$NbK&3pizEpBxHr1k9=ARv zrtiO<2m?$e3H-7pzcgLtu(|>Y$()k$VZ78XfNX`XL*cCVY?IRzPZ+|GtnGN}E;EYg z_JGL^((%=WPZsYCs%F2sP@kMsaJ5@m7+|~d5LBNF+RNi(2OdVW`+cPm zAH0I0=Xt}BA}&{SO?R4(Jy6r@Xo{2j5_6;@hNukfLw(GM1GYM4UQTy10F0>NkA~!6 zZYCT)jXSC$1FYeGyW$^Xatb~_WmyhDJi~n_(#*nndkpx?)t{)`Sg)vEC(@MD#!p1i={>)^gI}2r}S*Ikmn-YXCZCZ@2*D( z{WL&@X{zdX;p)`iO2#mlTsWM-dx==M3Cq>L=q}w}V!`GW=!5sE<`K+JsEXJg*rQaT zH8;RR(FpWny9R##J~*X9p`T%}Tg}>wR(3&>guhBAFBj#_HCYuR7>h$|t8cfC_nBSE zxd}cM^Y}&CqXljhz_G(VGOzbcZzznCNBnj6HrS4Yf4$z+YO@f!5>ej@UGQk|(D_kn zu-{=GlrKAvH|pVv>&$BYpsxL(AeIGf)tJd($E*~{DkrENukJVPZ6ZHr1qpoI=Jkv5D!f+xHb*)kU9oX(1?8;dI_KJ;50fN&(Q_mliv zs!8R>_a*^EclU0|It-3SMrLNPMT$ih%~dvq%9|fv20PRrk}Ow?p-ho+-TntSQ7{Dl1-8ufPvWI-7pa)?XtDahRRfJ-J4n zs_;HKc0LdXpu2|Tz=|VTJdo0?79C~lrbAe&Nye3iep-5u*io_H45-)$nI5kUaH^RU zsU0a94E%FQQe9x8ZJmQ@KMG$JE)%`D>_CHbq>!Sy5f{#yD-M{%KX>-J{e&OTd>WvTu;EZ^DVw z5T(~&&}S_`C#qNs0J}f> zLrDamQt{X@abZAYLb3?U`-}UebuT2z;U{%87Vwakb*OtcE-gGyK*_OjMEj&hV6iGW z$%1hK(MjcK{=}!a&xC_dvA8cJvcWFa6!h7a=#@qFkU~I#Wj#=>9d->1MXl?&PwTx8 zy|{7QiwlbdiE`!Q@NRd9V~p>#rD_r|OIiWzGaHf(jMmgZAfWu_iWmi`yM~RgQRz5J zOi-9b`p*nuFxPIyP7AbZE*UTHH(~pX`^pDnez)l(4MaM$wBRCt$`ZSFE(V8%5~vLB z;(33uitV1SSuzd1H-p@KR1_&)UM}XgfD#Q&tYF~F*ya_`eYfo&%JzsVG&~JbwG~HE zUKi$9Fu_4_v3cwR`sjT>Dud-XT8FVC0<>_wQ;8@gUY-~t3f@cL&hktdb&S*v7Pnrq zknA+Vbt*y>96%k)5EYC&#(eZ}mML`WIQ|@4XzUw=1XL>1WxBt3NMpXoQCOa-T{}HY zYQEXPai53N8XVCO$bPH0{^mUoZr0r9tZ9{A9E($IxBGDEzi@MaZ$2 zPA#W!5a^F*O62U)op+|yDBF*-Ez(B_jC@W=Y6wV$#DU8vHS!!i108T<^cLR73IKJ0 zWBaj(+*qSq7>SoWR{p0nFX3!1jlmtS;&tah0npqYR4nDShbWN?zCHg2N5k5rE)Lsk% zbMzCF36yB5?~~O8dI<+{9)?<>W!IZYl3fociE48PY6rWVLRDOscL5Ac)he;v;e9gkdG99fKmT-BU74E_>QfV{sNip?HLBH4*9buwEM$zg2o>m#INBLmq(m+-%B7+|>97rL(~`3x zeIBf({j?b*yhrQW8;=PK62` 
zXK%$|=(+piBt!dk(LM}Qj^@Cr?YqnnMGwSGX6z2c_}M0mC0hiOO&nrw6~Tj44Rwr43J?%v&*m?N7`N zSpqAx!l2S`O?G!(b-8Nw?b$oFN+@&%bGx&FN}RH^uW|lLKS6SWt%>!hh+}kNP~Y$h zxq91yD?!jWp|^x3cPO&V9`HAGo|4#TDeeeH$bq58KY3BVXA155#Ct^;RPS@o_!KYB zoc;5|_rfY~u%PsihG6hSXK|2LGE1dGaU$W%Z(I+z*o>|xdpTNr(NW47^l#Rk9Dy0u zwu6fOvQr^^nKQt0Rp1&iFZPMq`NK@iZGnb?f;IO za?m`pZM?J0BX9XCCpz|6rE!4NuylxEVX)GbxcxP> zOT8*_x4D8xI?VtlMrx-We-t^?Q~P9tnrS$HeY!DyQyS%uA1B;!?x#Wi5jXojW9 zLs5Litl2$AYu|F+K#gvGztv#iATN88v}CY$G}{xPybiYV8t)xv-?DFm*Q|jTLfOuZ zO(rmcw!rUD1rU10s8pUiHi&3$=#HQ3Zifw1;HuXPi-mVU1b;E2v5|uv1qIVRH&90X zg%^uw%*-_$Pr1XxZhY8}fuB)p-`}nF3t4X|92_s^nk!>;<72*IIA%^`9KEGt4E5^Z z!aOD<3st~!#M@iyveT}i1pM-BrPX0WEtanTipURxUf-3od(}*bOT6f)UVz^$N{d2> z7xxAmi3o>Y8&su3ZomYjf}d|7?gSVJlwmppJO)_C%R>>{Ix?UfiR(9!oo;{pe!zC+ zoXtdM34iC_Ep$=4)1SBj=elsewQWB^_pZS)e#q0CVuJioNjz?5Y!ymZ>RkE34Hl-6 z>bdBfI267?au5xnc*d;cKrwZd{PCQWUAYH03@;tnuSh_5lp41lGtsP-&#rypx_ZsJ zTFH3Kyv`?eJ_%^dyl#3C*P{6SSDCljsn$khOP86iE(GKpHr{6Tu$Yx2(i+}T3eWYt zH~zU_Hpl&$E|&K*_Qs!F>NS%K>Ykg{^&WxNHtsMT>J-RD_AXxj5_>Zb2(6QXQg6tr zJWAZ;68Mv{qNC_b+oge7m?2+#F}U#4F|A-4efNlY;ul!^A5;z1n>73mf|G zKy?*wBS+<1dw2QV7P?^R@2oOWRN1^^GIqau4-#em1n?$O(U} ztS>c>-zZ5A&o|9%Bt0XAKlYB(Uws=OR6NMb(_~t9-|&%+i|$extNi0ZV*yaZ9DYU%C7RM(G9)jJNOHo0^%*Jig)`|MzOR< zk-QB@lWU9ClLBZT%&~~FP0u^;$939q2xJNH`Bx8sjpkg1gVdTqf;yP4YK zpB&cNk<=>ZKJdvzFnMQeOr8tR;anfQHWXt+C1-&gzu2a(0F2V-?l3#xD_n9n`f-S8 zK)I9Nok-@RDDnBVCSz)Q#;pxRt0j;G4QFnYz7l0rse_1l2Y^ZV>ttnDAQ`#B0CHZD zWEa4nN0$+Yo)`W?QD)uCzp6miLx}N|x%a|tBL9h<@$_8+YxFhp1W}x~k<#_+M@+aA z+9!MIUjX7l$e#X|GFcyrU(OGb(%c2!#c~2^fWTQ~T2Py8OS1=itbz@^XTG0YUra)~ z^o+l7Hu9txr;2LUf|m5Mgpm8CKEK*6|U*3<|%!4j{(ho6P%(E0x3DzK_i6rTn_>$gBENncvVP<6t;b91{c-zg11*;7BKhi{M1osBd1?B-xGRg&xJqNX7vty1OX~P>&nnp zll27!_xuI1zx7Ajx3%;~UlKPK%gTvDSnf{D9yFQ=I&N{sFr^Y&>VwPrU^P1;AQ)qH zVpYVLT~qq4-l4=eJ8ua^vKnM27-is63u`0k(5~TZxvI z@7;wW^(ZLnT*xq9a@3AwxdFjZ9iuq%cLRLLtm_oFVliBd^bqcTSKXy^MG}Ma`JP)H zm9He2tLAJNf~@`|ZjsliR-_uA`al-fqh>^g{q?7^LIWWX)#8?xo^SI3h2?dy zyyh%BfztjJSygR<(1F2fL;E%_^X6=vLO$ept0d=EK`Vpo^8iX06Ah%LSe!n8g3IUx z#Voz^^te)+3CZ&Yg9Cq=W66was;zzxQnvKYY3N1T2%{H`Q)#RfPfBKINf&zy{33JU zVCp1S3$L8LbLJjvGkw*y-;+!Zc+kptpQuaGJ1PE?PT1@^{$b~UKW1}Ple}xWL%z)W8ba1 z!%n?BYw1MxQ)OQFB~*hTPxLxlnReC$(VK8>BYTp9ar@hi*(bpV^Hhf{0*hsz3nw7Ml47dS`21qaMU#(3G%C; zb|_|qTT!L1^`Vzs?C(Ez&&|9gl}!2_Lj&8VE(ir=nJ7MG3M?1=@Rq|5{zmu=@- zV|uD~y2mzn?9xRMoVV<0@P&FnlQp5a>W39+JYEb(y@2F%qI=)ytY2^$$E!4`j8QQk zTOj#JIk`AcKcc?|$cU8D@y;4+ZEO=^2(nEdE{XrH8iu)TD458r-y-sQWkR`yal0NV z3emtT`Y9zp@#rM+)di8b8{>bth%Ya)Nx}PIA16y=`v?AtIKWDl+1D3U~fGxq@id+CIWW8s+G1PnVc_{ zlv~4Dm25qLBU`3#KTR5>k+~nHRFpCCM?M$Dr}}T_V;ux3Cn6u*XVu~XwU!jV8hL8B z0EW+b;`JK>hsAczay~LDa#WerTB;^fMAQra&%UJ3^*Z%Cg<>p3$$b5;SiEg~F#YyY}2YP|JX2rFdNeAX&u zvQROSp8;8WFdDot=C_Pu;?w1x1a%T|NXSLNw$lXjGKoZ{%wI*6oC*I1k29v85)A^= z_%!QiK;YMP_Z0SBM7bUjg^27=ar=n_cQf3@pi_2(r^c%$+Q5wpy59sfG#9Ss!jtm^ zRj>C>F6Ukh!Ff}i9s4#|$;j(IRx)Q+mK}pM!0Cva+7E*fcktH07^z*7%%xK5%>B7mTNv zymKg94BC(Sn7F^Sf1ABid*zW15J9xMFlg9FLGO|O=ph~SLKQwFm1O91sD@CD^nieA zGxMWv)(cQF$C8(#y=dK>nBQX0Yb!T7_V))gD!)*KH*AULRKzRVuUG_$6qS%~YhTjK zD-=snw7qj}67xV%j&ie#!tTtm3oGcJc-KLbT~%8F`>Or)=MeyJbPsP_0aU&@d4sXa zYE5W-0<|C?-UN=06bZYU;X4%E37(a8Cl$dcl!DP@)v6{v2xX0~ZF=*b*_AybFTElQ zgFbZ(ek$n@Ku0BI^rOM|6YUS~O3oh0)Lqi3mt0XL2RNOP%;gpSv`Yl}OsP_TW3CA& zNBRIN(sUrW&3X%DMYjez?5iC@d)yN|l0%BzuTOjMimh~n9}jPX09}+{^JyupeL)kc zvj`K0$v7uzy%m13C2QVcGIXcwSb1`*V`oNgLFGeO&j;ctK;t0DhJQg!wIKqk$|G&R z(Zwv^H`>8PZH=w(E^IjkcN*=LI^~sRd$E*fUE5tDR~(u@(1G|_V63y#NHxTa!?aQD z73ilY|8jr1L}MhlWa_(3bcMQ}d+-8WQnduyt^Y(Xhoon=fXyoA(0U?Y#Uvtk(#s2Q zDFxB@I~@xiDEVFhZR*wCE*KwHJo1!4;}XJBf1$Smc+ckq{y>vE-=mYp2mi%WqMr=7 
z%{T($>9oGTmCuqo2Z8=g7O+)DI5;9+0Fkb2snOoj&3-o=Fr~_!UU3C@nyMoX zshm8)S6Iqv*`K3jIi%5@BhYJI`{pZ(A9}pJ%i-B$4qaZQ1!?TKG%nNohI=;&hWtTw zk#m9w6;8b7<{-Xjxpk1e+Ski-w0I(h8rT-3k!0i)$pS-!JJeVe92E<*;iP_Wcg^^5 zuV@zcq^;=0movc9jc)#`%wdy^g*gHbGc*z)2cz`IDS+3^J~nfNo-G3pvk|6T>1#&+ zBUoAeEAFFHVPk{3+Cu4o>x|%Ksn0TzeJ#RYqfW_`6m;uZ|GR;d`N*rb$!1ADX36T@WOi+m){9y4N?rCR-Hr;@E|s1o z{!eVk-lm7jf{~S^OMd5J>&oMA{rkwt3FB=kVwN$>sO)DW$Nl;7@$w%7oy%WS7qZ_5 zv&IjLBdSkmf)3xv{NsIrlPRSjdH=LyYGc43Q`W?5GSb59q+u%$oMS72#9}{#JvqKW z>jC_Kh05@8sbFeD%_98r7-QvRK!&Vb4OGSeSMngYe84y65{Vn6!{oWuNvhhqr*2bU zef=`wYP#R$N5k-xEa2=mL^u>T29`+t`4I9tD-orSAQK&?6%t8hSonFKda;@T4%N9d z6kp>slG$LYi@tQpvbr%=V_irkn>F@}$yj8(1q~rL6dsCr5__pNpf08bg!iK06j4K9 zHYXXr&mh`dU#+m|D=N1(=P_y;vsGp z03b`zKIETuz}+*WEIrqk9P{bG%dshwPsW76Y|8@m0&%DoV=TwruQZ(wepb?I;H9^$ zW1f0aX70Q=IG7w66Md&vJ}Eb<@u@h43{>K;MgaUPKWxPVgikfum&0Oh`x(ohF zn&qOsDt-M_^}$@h9<> z@62xAmVrcb{7B>1MjmI1$Op$9wOKmF9-L8O6i{)R#X@#R+?=T6xoG0M@nj;;@D!Vp zrKTV%>setpdqs{vmVG(Nn8tiu*H&o06_cr@{XejR?_=dVN9pp5xO;bgGyxY;_#YHX z`cvTAOjj>gXx8{C3G!%bz(r}(jT2bmnQ_OG&p5(Bnk3`X#qV*Y#(P3_!*!GmM|LAR zM=rdb7?7V9sBJh6>nWYZ{T_Io{`}4Elp4Lr)C2QxRX#Mw2t#RbP{C4Ifroibkw9Bx z#nddb@KC{6X4AiKay@BUjpYejbJ4HT=)}H&W2RuV)``66L_uD|;`C`UK7+)(#{N!!DjK z&Do0%^9k&}{&j`y0=b(@ijAHDO|B#^$uiB|pRB4c> zYYc3}aT?CF`1@CCZ}!uF&Qg&TGtnt0`&5xpUjVve_Grw^Dnc?{XJTXFLAUnPlS%r0 z+UZz)(QD}f32A<0=)JdoC-jC=P2!?Ar!IbMtU))Gn9Lrq&#>bn$5nz4FmWH8yrJ(B zRuB_MS(pQUmSDVXl&1)BM&<7Hh0u^8*IfE>ymlmRYu(m=S`E7O)Il%!ao|yR1ajUT zbK%zgWuU(0oqX_=IgPt^&WkP~4K~h&%c(2r($*ZvOm5BYTNvNgm5Og?rEl3LCIH6L zF3jkA*k=Dhdda84BAH3W=$eoSa}-sdVyMglf;|#6WoLw9HL0k<&|O-OF@q{(bSIyK zZ@G*O{(i+vWGaeCwyYg=jG1^xy8SUZ_GfwUnd{TCw)Y^Pitr-a*`-d99=t!LCr~Qo z^4KZ1_xV15=T$gei@vLY>Ut(;N8D)Fg-N+^1_6*A{|P|UgsmAL>407aN@G+G+|&iI z0zO!24><$lnit4&k|mp2-{2Ee?fIVly%Sd;6bb}+?SAG9JEeAx=n&R-HfcF3m)uEq z9rx_FuUA=mNg&5j(59Ab{p8iM?kDzL}p?i9=g+*Q(I3)|R)BE&O zqGwcBcf+n+E^6kC$RMtCGF$~jj8bjXYI;d7QE9{L5c6D`4t*nIHK`7hi5-1Ep zUzaRVny$iEMWJUhMH>+rY*nrPs#~2-3~cq8t_c0NSD;tZSRt`Yb^V zhOF6U*aMn@&R-vHGszzZr*kmeN*naly&$9)k(nI`Qwf7}Uuk@1Z=U*BQ(b?VW=+h1 zHmGl`Fs`SOeu*tU%3@1R$zFZ?f>dE#(?qS;XTE}T-{)X3q(Yr*pV;J*NVlcFGpcKG zd^}~nN|GG!pM4aMs7Ci(LlxRw`5)^(MRfVq*5$FDq)xBT<})wd^2s895}y=T0jN9O zLqV^~!Pt^5%F1gPrtDA-AH^8-3S*0&`@aM$@rc>1m^`I4F?o zdq5>bOV2+Pangn{)*Lj4*kcTRZvDHpLJrB5214=(A!dpIq}xzz7mG=CZd3D~pw~)b zxRr%=GYm*wTu0$-^PQR-5oI#S@N#kRo=7gxd-mvS)5q+BD1q2usz;gpXMyvE<)nXi2d9{|P-Xr{F_ z`}qq>AU{CwW(Qu4xk+qBsNj;^3MYG2g^c=5Ya}YYr3)E3L#d?Wz14QCS8XMv& zY#By=cs_6GmR=B29$lW8qP&5(ppx_{NlzD8CMm@R`(%%tVFw(F@N?zTXxGGb#>5mw~mdLS24L2Ft#}YvAg=j(SpwXCuYKDE#(@!uUp%@4##f%GvI8 z@j}S#k0H&2l*k8^s&|gYH^>jlQa+&*C;Xo}O*2tphKmy(ChZLE=V+*8T4)PQuoO@%rmbknHqub;(~*7BEw=<+jMYQpXVHB&>5Uc2>)IRbYAmVj{FY{m zh7+=7SetC7@gk~$Bf=KVgDGJR$4k^i^=6brdi6ss#T7CuC;ZDzAhL{-+u~Dyv|b=L zbwY0S+FnSS=H^sO<$WZ_-uTkkF19uS${us(DFO8r80D7X9UkV^D$+It9*e|%Ty^v4 zEao#L6&aT4>bRHz3%Z_laWykzYHL30>@4LwH(^42s!;0W2^Mf=_(zd6P;7cXV%-v; zCC0%nv9GWT-K`r|UUo-uBD1Ks#?grHt?T+K<{D!s7%K~3WB?&Hm&fDy6_pY>J$ff1 z+cfZ}rI|qC;Dt*lH(%6r(^brkH|8PE+2p5ZBBWr4PNEOd^A$>eRfua>7dmG-oF72w zFu&3L#KnCUOlb?^fJeFCqm4Ut4}CgXg&)Om9IeQ`^YOz}nkQ$pm2KaLbTgA2p!HtWCyRJi)7ityba#WXNSGc7ohDa7~be-+U(&{=TN(D~1yJ$grO# z$H;POf=@n{O!Qw|dzX-XQW1)%7;y$F=_hgPWEi=_uFwS3piMK6YfX2$LOfh)xHfH@ zuLYDjx6ZC}tfT5wk71bk;@2$%8C?T5dy!`jtSTccqEw;uEYWOcv6b%nQSstmv#T~X zV?UDWG@blld1<3-ciaS@!K)(J>@sDj%PHdPG)qH@%lAkrqcKHD!ENj4m|y0pxWGT2 zfQ6cYH7h?{TD@E1J~Bops>Gc90=EVs&SPABKf*>upBs=jtZB;z-dkYQC~zzTihY2A+kwtQ*OPQ0e5r zjeK7=wX|so+P_MYRC114PK=kN*d+5e>z_qSIOnf@r5n>{I{tjIe2T+*s9vNtrh(|2 z%$=)iv6%`hC9D(Tw}9`C7_NZmyNwbut|pY?c6@^4LJmooX|f+g5NuJRw*B)iO`R;; 
zIdab7dKgw(d>0j<@<~2(r~}!?^DaTFZlU^tk+WW=wFecn_(me(6+DU?x`|5)?mMaM z3{o&Lq)Ap|K{s#d$Cw5@=nZK|U9D2A0-z8sfiw$jdGsMlNNO7#{Z^)fzQ~VrbOM)- z&I2brS+fWX*3jus&Ri{0P@NdOEHETaL7OKmrVO&8_1(iCnt znxL|K*|AgX{d!*#{=Unj+Ymat0+2(4xo3C)r^{%vkXV3zGa5k`mn%|5!6` z!kIK^3c53zMqON87&pE@o$;eDn#ry5`76+4$>)+c@oX2l`ho_+0`aqz-l%c8$wwW7Z=N9Y<}+Tz;Hd8 ztFL3trZ4tmNOhGH<%VR8Hsqj$$pM}89DkEi3`R}xqL2$@qdQP!caoquBxD&@o*6}} z+j>*#wICQD%OCCS3AA64W2D33_Jcc`aN$VdEK_+h<13NIwPfsed(4wqnoF=N92hcd z@6swrncV$+0LJhM8h!OU(R1KzzR-kn)GB_$9U(0bLq%p1a&rbzT+ec@bs7JY^75JQ z*n4nGD3$D;4?xEQ+n7oBlDg=BHa4Eky*ec6Y(NH}ZJZ^jpRo-~T04Ara#%6k(O}Yb zZT=)*sT6V zPZ)Sx4?YUs6{xm|qBkw0aF}n)n(rcY{7ZTIdU;@8H{sI`=1ULx;FU^A6?aF(*v%)C z-Ud#mdvpDzma>{X!`-eEbh3O@X-2vE?9k?C$sKeXRk2_@8lF;D`!00(tU^`*;2L@k zpdQ~1nLfdgIj)PWUDxqos=76p)614=lfOs=rVk0?j1yUx2T?<>uI-JbJod`e@GLBy1#&G)-+X5r7qSHL7Z&I0%K<8#qbmve5kQhTNBz1MyYEjdx@l_G_y@xzn7V`rJ`b(nv)UU9eieOEnh zbKDe4iJ<;Idc$m=s8dYH>7Epg7LY@ z>~fugiOs;EIN3V4bL{_N?X8093WGImBuLN@+}+*XgS)%4VWYu48+Q%v?(PJ4-8jMB z-QEA3GgCD+Q~%6d%(+aDDI+4t1j*mtD@!e&`8QjM?aoh^ssPxH32f{A$h&Ouk~2WISy0Tvi|X z6+BuwA=&?&{ItSy>_qjG>&WlCam=xQKN8D%QuTbW()OC=eoqhQ@vvPzsVO%ae+N6u zi67a?r4EpcZ0ICEk1E81DGg`vSe& z7)DOMmHtXtsIM2$N4g5V{;VTU3*%(bGA|(S@ti)mH)!w>L=tt`pO{lHLa&>~9?Wag z8`FJ+QKMBFP=nPIB_3#3v7G~QKqII(@+xnx;+pQ!c>dWcpa}hrhcZ;}=rRx9T4n9s zt8tuW6Fb;8{9b-j*G4CfU?Eh1&^b7Dz(=t!5;teS{IV9et1popbi;fR9I{PKZ$LGb z%SnfvwDCJXv}B3PVmuqRd+cua-Sxo7I?PhtT}k|j(-U&zfSUj=h-a%lLZEYU5Auo| zvM66+Oiw^^Zmk^dFlJBrJBj5-0@|bs$)ZFhe+_Sa(%Qj!$gPAeiG8D(hHVI*$Etef zCa=00G4WRpedNJc_v#UG7V@34V(d)7^NhOugtvE@dZ2D9X*Vj&9FROpxSWnEI+1Sa z(>OG-kM!tJE{&lL2Xz%be$~~)S@^wSD3cqf*hw&&k(p185`Y*Tgs-JUz{GU%TS_zS zt!uzR^?4kqH~G`-hHVa4y5Y91V+*7}^dHJy}XO)~CgHF{FF{k0~Pl-|{8cio~>AzjE8(|n@B3NvY zL~p_%1t*$0;hs*)Ypf}6($diRaBp1G&82NRGYgVOx+2i-%#Opkw5)&#n)7yB%uIA+;YIy0cyhiG3gVVj4pZ~J_ z`PD%*vmwiZ-pp)=L)LMRgtr~8xkGqBIOO})+MiNZsXEvMSdQ1Zh*lZgv3_1zqX^-d z@<4?#5e#On7c&nY@gK^7FUWZ2B(l>4TiV@AKfo9)og)F;nr7l_6>ajrQ&=rY=*s8= zDB%y{#v0(hk7dq;5IBlz*r&!g&_iCnt8}2bmzdi`^sFY7 zaBmvqXkXlEEj&IfkGjgPMn=_hhqKuK`V*vY_H)V_UbA<&DDg>M*RQ$=+m<9Au;M93 z&D-DczeMvc;yRR@m<{^3dFFAT_!Vhq2Prff=t8wgl6Vxjo?m&r;c@Ursv68xb~S7( zoFV@8T6$0vIJyaeBxFBF!~}N=)u|{Yh|tI5FY;AnF~QgBDun^JjP8!Xw}GMw3HbI}z8BLzP5bXuQ@&=IsCt*YK{8*N%5Umg|VXn86FVU{ydyUV3>^9z9o_VcJMlGG~#$3WVZw>e(L#QqJFUmO~_$pjuCM zk@WxKbtG1Q0V=GiB?(uS-&h7*AOT+e<5pJ+$&TY!ftYU>L1M3FNlE}{(%K6C;I9-% zVJXeuT*j+z#34BPA%>@P9)ehc=x4I1%h!gDC*AO(_i^9{n;HWN>3<=>At#}8KBrD( zI?q?Zec?3t*V=zD!ckPH2@T~nHNhYt?I^zepyjCCdI)gU(*9>2z5porgd`SjxmH*!0n2 zG4(AcB$E;+XY%X^AyU0JVI2nB4WYO{WnORh;&zLi@C&|v!kY=QZ9Fcc;S-fhuOnX1 zxwYwJFH3+70|B(ySY~SsdKn;rr%)V^@!M!E#`o2$S;y)e<;pFWjAkC=of)_AlI_UR z#MY<77B*M3)Srv0?6kZbyQzs>!@CAHb+x)Oc)V5ybk%L3Ne zsk%M>)aK($R)L5}?pzprLI*t|7_=gr)d8raGk3<|#bxDn6BW^ZJUbdhS&HQ#Mc|ETLNLv0zVAC|_!(onq4*cFO$| z?Hem^^z}7N#*5M_U{{`yEayhzR^a^(FB@dEsA_M;D$k>DDE3@Tm=^tPl=L%?xnOnd zo-ifD6kkth6Sl#HZ|2}o|BNf3Z_ddmmhZv^GrdW|(1yj67YP=0yS!HfBoBZq)v!MWk$W~yYEB2EHe!vSdAsBr3ZO9R`eZOvqawmvuI!Rh1;SZu-mY2)9`#0 zh~`oF*#Zn{`8@ydpiobSz}vAAkccODF3=fX{AN}klgN%S5F}Wu#B&NIqGB%Pd0IxU zqIld!_^fl_QtN!QB*3N}ykBxkS}w`jSwInnm;-aCeWgIWbr9ObQzxIM`^`XgMn(ZG zVX(Kh!5xZMaPO>{=nAZ`B42shyyLXAiHcIBnaX$s+@WZnsn-M+27&vu`^$V+8>8Ku28E5n=PJ|`RYI_gm}pb7hX^ZeCG6=+3iW49#eE?- zsSai&f-LnHE)T*`8Qj@Cu_$7U+{hdON?bs~$9=x>oKUgH-#-C2S&itytfbZ)Tsy{e zdIeu>jkFUH(Wi-Wvv9`Yf@`<^gxYP3$+$%(l>PZ%ug1@b)*6M2-WY=0MKl{t@JRLE zu(=X38^N?bSG}cmy602meSd&%Ig8c zp@G&WBEQ#9s2v)m)dKf7a*_6_CEejCn$XuYvc{SqA*NkWftATQX1~cHnS_@90HiWJx8eZ>5PCDGy!#SJH$^9HCDIMCjHAca&J8Qmi@?x?r1GZ}CQHc9h@{4%>7OngT>?F#O5jdMamcj{7bpgZ7AxCLA4Ne5t}@qFu1tRMtT5i=?%5UGihkR 
zTLBdp-33YElag+0Q}`L_uR}h=g}@oFEb9=i*&#BO0y0f$;nSv>+gS>f$TIU*MD6-51uEnf z;Zu3=a|4(1o>cn|j)KslY@P6G5n(}1O5)c7RsN9+5kmp9@K~g0R=EBR#nnly-zWQU zJ?`*US}IQ0`IYG0fdV-YtL!Vy%x38M3>$@Ef}-i%4r{(u*2ogex{;gPPuGepo!+=y zF*3dJqk6>xIBUvF6et%+YCc2zT5otb7QZU$j(IAIo4h?S|*{kw2?`yuInW-Ycqn2o*$!eI`8gR-Xqt%gCDy;dr# z($SYDBcWeKx#=b$UyOr1b^Ki_1I%aJrJmteWMaXO#=F(*rM(y5m{D%o$9e_NR9P-z zomUT!w*~ol8<)w7Ca3>1(gwS^pxo2MXwP^OWR|Fg7wm z;?=)zBipN&XXaNKXlT1EqgaHMWT-Pt7i$Tn?0NKLug;fIg?f$WG+;c}+Vy`GY$03O zF3Ey~gIa-#8N*0GRRN^YVb8b)cWKJdf1fK$2HA!!roP58y0P!cjwjwp_t>rVWb|2H zH|RlTxhLZ%%X1E9Y4D=_a^$lUnDyhvXyJEC$m6@!0Dj?6VDB_$X7W{01JF($XIKCl z8I^1cnr4tLtO;ei*@`R9W)yzdHkFUlG2ZC^5mfQ&J^)a+-Hc%keJYpu*(G)t zqjy?m+PXU}_9#_QG%a`TIIUDa9sQMV47(xknvCls#TEC%hJI%q%QJD}2_D~_khlT2 zmfzbQg3n$&v=Xw5sumZGQhX$GLe|QU;TAK*1&AKEkIaDlu;@`Ej@UP|3y);FdgHn(RY8TH9RJt&EyCwor!6OctAtd;I zAz9ZbnjE3qDdTD-B)ZMvL}(oXJ}Dy0eq(=A%({DUkRtloB3*>O7>TaIfal*@a^=jR z@`a(?(+Z9O=`s4vj6&AyBKi49%@N+FXpGl{Tn2Vr(9!Se@ zZh8F;P0(76fl!eF2D~H5YV_9KTQ3f5VPE-b=b1LEKd(QtyvLDW6WI~N>YS{)@?Dzi zSiM}^hHWkL#wf^!2U1G0Xk^t|79@G9r9y&iz93*e8No)!eJC6VSk{`$QhAwIu-^T= z>Gh*Z9TW`=05rm>7|=|ZM+b8;8_%~(gbR~2~kr3tTLL@!W({s#G% zZ5sT|PKcEWnI=fVH^4Vj5!y@nFEUx{SIu4BI7t*EeosVW$c*)Px%miiQjP=T77O~l z=Zq;Alj2Ce-#E~*XPGXzZB!vIh!DOaS7489NtTY-=m)@|JqUS4qy@`J|3YynQw0O_ zSSDo$Q?;J60vHlMlh6|$YOO;AJm;Yh$I(Im$*IiJvb*H z0-Z>r&hwK_?GbvyXGlmiZoJ{G%C1Kf@e z9nt<`o4}mu5#{iEv?1b#AE8%Z`SVnv1aEim{RJFXHG4Q*7S=D4uek%ccpE~_p6^{#{waA??VwH&{*?C8I_6}{%HxKtfF&Hc>q%nE9dZq4}JYG1{Ke`is z`hh~J6^n)rI`GjF%@DhG3|ow1AgN1iX@0iU!X!_eLf4ao}LQpFn9 zKDw@$&&yL#eF-{UO=*=7^%kXzVco0UBKx$4aeaHWcx{2{kmo0t=Woc;BLljah?nuae)Qh`*DfuSmSSd+Gec^_&QN0_L7xX>JsL%CJZ*C%?>rQuJoT{7HN8qD; z-(unKPU9I51B50AxoAEUnG&A)`@r-99<9_y0+TK}kOqCE+#aZHK4bh&mYXV%Kn#WE z6qzo*UZ$#zOltZ8tM|AZCvVi!s^;l+?7YlD#C&ugwg4t)(&~i74to!#2KhEpA+@Kb zSy%ZuM0=Jt=S)ngTLLQquWOwYwcygC>Er>|y5aayW7Gj*s6=4jlISn8bG8diL(iOgXj#n&^=T_WnRm8HrlC2S#Yk0j{HIumCU2yTV*Nv&_%Y z-$jb9L?{nOR@pI+@p6sV*=xqHE!MOf5T&M9(wVF8E-&Oli=%ywxghNu8yrA zt5Z{|qp?z#Ijg&rMtE%JrapEV?^O8mG^JipD0xfwyawh87EK<{i}KGT^VF3klx;l; z>%UZsaxWC4zTwe$ZvPkM%^1-qI`} z5Wn^%Wy@{7ILELtg_Jn=C%5a21aHZhpi8Jg+xktyV1GHN))y{Kw_TC!>Ni=m9gYUY zj=RcreZ&`wSq#BNvbwK%547A)Nc*DI!Gr-(zQ?b)jY@g5_}L{DC`z;ofVvR-9tXnRMG*~ zW<5T3N{cIF1D7r%AJD|cGh_x;&*rUD4STNF86<5xF8hYqt@)ACxUXequJShD2>AVO zM3FpzYZtJPGpJXf-MAYG^RgWNP+8PzjLtD_=_@M`H~?M2vyb9$^^27RiOQD5O5WsM z1z{k+REZeCNlTmnp+={s%^Xt>xmlXX8^v5i$g#I~FiRSBCz2{}VV1B*Uer?8=#pci zHuiS#Cur70nddLh&W`1Qjc zW#Gzp6unTpKC>pkM8ttSU|G=2d&ID@$GC~kS&XL3JUO~~BKq+s6cY&2R zqU!@kK2LyFgf&AJEVRf9rS6^bCFUE!m4iP5T4yHfFvruit{|`B?TuYi9*e%H#0MXy zbJ1K{ic#+#H!>F!b1{3QEhZqin7zEH!~;zp(>A0MCYOG$lsX3&Fdl%=`1Zs^_|W3m zK}AyfH!ibC7n<}u@H@5*SRv{CN?Y0Bd)H*!9E|nx%MS%!-8S%=qkJV-Sl?OA1i4&ja9ujWE;n%7U{ zZT(i%BP9$2cL83|g2f|dnqe^QTW=sUM&0O=_`o+2w{#$aXt70Gk!<8B~=kU73J<& zGCqG>V)k9AS1OsvVggWBD{@JS znar~Z^=8_U8^kimR2ueEtOo^}1ckXGWZEHx=_gbYC9|H#o4>%3+klQWE?imBs_L?- z(v#fl~PGF%lfu zsD`jpQ&!cY!}(>=eTgolQiFYbT9Yj?_9i&?IB}jWx7KH-66}QA{kfKwyM2}-A?)nh z1a!@OGWgVeJ5x8#O^BYXr2X5N8n_7;DJ|SzdixOU769!5-l7(x-C}&4{bgKjbf}B8 zwilJE+XwCBTZ9+zIeSiO+XbgkjC1nW?Hb2@0wSCJ{Xvf_o&FfHW9WP(n)(T5)Q(q|}p7c@oV8O(p(0_{$;&=`-bc8tW zrSz<0$+!OF%RgN!+WH<%pe3jjF|IId510X}1H zD+v-RT)t5ICabINg3d3Ls79&J>4MPE^QE%^eC;A2~uh(Os{fw&Rn z_-|a>9yTQT5sen|QTdwF{0~ejJ;jV6MX#xS_E)wkss<0L5!2sUIQto)?{p^WEdU)zkO z7af_wpyaF(bNH#9Z`?_$&h4Xpt%TK3S2kD$RB{Wb!Pq!G$~W z!(bED6tpcEAru(cZfv1a*HMxM_uJ3CL(rwxsEp3bhw?TORjd>Tx?j0L>-gX@-9?>X z7ee$RIyjS(U8^rJq(l-0Rb+EcHgs%}Ds({@33jEaMMBE8lx$o6oN+U>RAanRUhsSM z%HFrc@R5m~8_#3lS-ipH-l`3Bk#*W0fCxfL;oSr)Z^bU7*OO#wTY=`#ktnD&2?(5_ z5-;4QDb(5D!VUzK_>bOO-mg>SPaw5Jcg?F1+33b!PyQ{ 
zPwVaQZ^j)RmV!9Nb5sLP98vvKUvL#Tg5U}Q?ei0_;Q;^sdq0)Y`r6+`fdhUar37$B ztSR-mU7DIgjp`NAEyjU8cu!+QFJqPMub2qgtBF9Nk-mL$9R%y0E3UQ$+)u^5j^ix4 zAGL@+yJxhkV0YH{JrTfudLd3(5RV$=(&Ucb zACCe|MZ^i1RI8Q46RemRD56BPve={z52s)w2zd(Jn8#u!m)<_qrq^q9j1|O;jKV5z zlf<_tu5#B|t5_e|0~<+aU;jmV_4c=LQ5$OFo{>tYYbaI=F_*6W65qVkUg;H6Do{gH z{a`McicnFKG#&0m;xfe;u^Qh@d4EH}KB9~@ty7Z?sztboIv+pHSNL6ngPb30R6i4< z^4TrR0AdJYqTv$6Z&vMx+B*SEYqb$+^A1*DRlTC99#YgZeu#o}>E8Xf9H4JSwSGTM zc|is6*T{Xy->FwA)+w|T6G)@oA6NO{BQqL;D3{(zew~u#31w_j?Q`7KOC3`SXK9x` zv8o&u^wVAm){)sSf{V~*umX2_g%95lqyzhVRSKk=G>Dr9izxDxg$tUf#BP4^cv3wi z^^+VVeiQ0_h_rKgNfrVv7zT${_3@^lEVnDUl+D13^{tXKf6PGIgwtGD8iVr)+kI}b&2le z71WtP%fY#^=>cy_8&5c%8@}aA{+7}$+xe4&r{2(lJL?iRi7g(x<4D}JzmZk$3}*+e z6Po{>kkE<8ZuSwWl`GLMR8_bm;v-Fd)3W*ywK1}j?>#8_hN7{-8U&IcFih%qP-I1j zkp;zxXBcedSsUBW)=Q~*X&Krlv;LS(BiUZbAb8P^`r&SjZzl^;du98@9|`Fd^ku4_ zk`WP}iNy@$M*n=8yZ=pXs4v{jX*TH5&`CazE(6Gu($W0<58!wG%t~&a;&kN3<}A5W zzEo16*cf%62=5xte16`%vpje27<-ZzMwzEI;g8W$$`GS(rpFgDvLD1P|6DKvfPpnt z(Ra)82-%@}a`j^lUnRzG{yc|I94MMYR*5jU+oX=zzF4ccU*Xx^FCisdFXAvpoiVql zA#T~k7EF<+1wx(+@yp>#<&bKHNHt&xl`cRpHJouSeWm&9v|2!DMO{hy^7r`HdnL{|l4_MyRR zj-Bw!Q$VMq=)Rcx-~zgWnc)-Hna>s1socojz=;SB^mN1w-h6!BHm}`^vZlvis?PC# zrS+Ush$$U<9UXsedD&eEC}p2_-*otlfDWoSK0sk8ESGhWgPD8Zl_PlQn|Gqz?W77I zMe+K^$poAc)eY11oIIK-?Ydzes3WBR7=c@yV<**1w^|!gI?DT_BsluUVcril|$cHp~hB#I42qzN4HqI#KqG&p45f75Fo` zf_j=l2#uZlO=jfSHi;gb-f~MH#S?<*}l3vuLAzihr(%XuRmtV zDVUc%TN|RCAV2#ViOxTL?`K##Og)s)wF$>XpId<9toq$%e%*>pGa2JRRMx>?=}pl$ zC|R>FKctpX2|d1Iz^Fd;@|mzzW@3x@A&y1wsU72=7UXJPI?~{%_ZO%E&||_C8}~IV zXjAsHXv3s8#`O}>PLa8rwv3#lR+4kRKiUT_xarnohM77d%=`e#Y+dCRC4Xk7=$z+# zWnNNxjO1E*GI8&}h*qq1G!QN_R_fy4M_5OO~RCuejJD$Y=lcQ)~s#032Z=V?MAwPSjQ~{Cd zV2MDLW@f>r!f8`^KpM`*$Dh>0WFsCulU)AOnc$=`(wIU4-8V)>$V8OdVKORO=ufVB znay#*j%NMUArJQ?W#(+LQ^>cljaaSg_52wqc3~${7hIuCQ-8rTq-9@vB7UW;x;(UaL)G&-6O#&!H{Ybdt#Msy|~P zSj-;apkJ16<@O~CC0KQHEYAcQtiTYN?)F=W2~5|LmYJKwJ%t3ZAX1exXM=-3P{{Vj zwH7vvHe$ai*!6&&E%I%t1K`DkpDwqroWc~JaSx}deWlq@vGmFIU6)%OsLkAjwGl;6jJT zf@-)=&5vgNjVitNLe>mxuL1f3+dj+H&n%5g{KX^pRh#>@~;#4Q}N>PvCTveXO%EpR5CHc$OKAAa^rHt17xL!^^ERH|ZlE$q7`E1EtxkY#- z)}to-ia7j?1+jB8i|Hwlxe)+$DAi$+1t{v5P>937cfTA=<^B`A;=7(%8LHW2B}p z5i_GrxHWD^fk+Wjp;o0zC>zeBBkPCB!glfBv8jI^rcezu9DK@K-wm71MxEph#rQL# z|GtnTgh+5ma3@n9{JW(ihb$A@%M?r|2>@bfu#m0mAdzMNC$r#(xfkv0+24ARYuC1d zGk{8dKQUkdUmDjsNOI8gI)^iYd^%nV1vN~}2%`51y?=^)Oi;PLQsJ>-)kO!q_+36~ zY6)o`M=mlWr2b4h+AGZoHLDHV$68sCaO0IBNtHwW5~&}uR`q4p9M1+MccidjfJDD| zbnVPId#7UWs`e3y6^7}JyMHuDS1mJCs(B`#(9l*TNy=Zp(aUe#NVcMRsu-Gtcy_g8 z?vbN0h?rdH@xhX{(5YwwXu6lFbye34((0%89QB%GemQfx#0Vgc@R>E8QKcB0&3zJ_ zizHrn+tSg02}P+Y`D!dPkWrZk_W`3}ycm8j(gnI@Rz8qfYSx(EEq zchG3&(VOe~-1M8nzh>o9Q>I)gsw5DFOR54-kGYZS(CqgXMVL}p#(0$@(g|?s%EB1R z-}=I!%mINyqJL!kPs2mr#44gF7a|leOsXxXN|SQZoVfEwT2VR{*J12f>eGbYZ*yTE zam=nsq+dC8xs{pzEv%y1@4P(o;Z?uE&Vs+zeM$>Qkr* zD)UXX-MnCp4D0OAsw)eOOfwr&AuKO9!bRMPbhh(I*ML$MfpemCYESU_zlwBpFBfTR zcQ3U4|H^qFfy8+G^9^b$D?~Kdj-8xMmwGySF(caA-Gp1^w**=e1 ziwR9CSP2j@tQFc>D{8?_1>1UHvI`Rf4&@A#(QdlRu*y!#ni8k(UPjZ5_qx>Oeo1YP zk9n*5QwG_Yr&fpZp}*jkhD3iCnm)FVcp`$$+o>T8q6ez+feZHoXOd zn?-s(Vovo=`1gk}1uHdGVjg%!RX^Mw{xY+Qo?X`F4zv)D=oOz7TZzb}&S1?r8udhA z`|IiYlS(HR(dfq7#l$0X|L;&9`D$X=)f8rB@H<7n#J6gzDn-GPGPoVyWH%$vvK@Vd z0`HXmpNsp?JxPN+P{QF=c|xhsdD;a#%`lZ)mwbCd`mX8tnSMK9N+c00Pb|AcC$|jj zbXa6&?Q|^vhabbMrtC!bXc?yI7(%kcZA5Vfsxd^iX1$Ln`1pEXAYQ-g2EoNXeLtz% zLKi;@v0c>gqj$Bw3R3{8X&$bt6{KNuVWF{u;hHRxfMJgzOi1cuv75ySK0zfFhHl~| z1oJ5OfS-0~)NLO3^p2HcY>xX#K>X_cf_jPrbKQ5x3+oI3fSAFAEy|dd3|ygWio^cZqZ#r7{-< z?bu25X86fkZ?Y+oA*`UBXp@?alPl!u<956VfibQb$|;xd4=&-y(N!Zo(oyXyuU_)% 
zzgyiCjM#Qn#ZA*UPVlBTzsj9!E@-WqOEX6?*UF@Po52cer3f5$MK;wJ`DBsNut{vn zMgunL*R~Qwhg>OY17IXHZP3ALTv98SD<9{2Ik>uy3Zsf~H* zcevT=h(Ubl-K<6+mw|vB>j~_gvq+icqg4MOS-FGR-dB-urbDP(XQjaT9jTQkXkEh6 zv!YYIy_RVSS`JnWkAjI~<^b^(s|iJ9rthx3*Lx$X!eel`Wq6-+eJxEzJKOb^dYYY% z?H2%I@y<*Qm8T@CG^V8kC!+*@7Hma(cIxfpYHpfkwCqh;yycVV$%{OtZ_2#qm#dDl z_s2}kLR0ZZp*gb}O>Ri|Iw=>kV!#Vj?#1#3sRGK(%^N&=$7m=6b*_V^ZgChR!?eyq zn1rcH8gWy<+J>$|h-C_6>~o+B7gLUliXG$N=p<81PpZqw0!}*iV~ROeRmqf7z-8j+ zm&T{h+w<|7An=FE#Agi@NPvrXaX*yZTa@FMBbVGVf>n?Xcr2oh6yaA=j!pjh$&84R za`8_OHG#;5qC(q}qOm0c8j3@jiFMYq?5LAkA$$7{&X@(rrG2s?cu7|pS}yv=wt-_> zCNslY9a?OQlAkPxY+NGk?T^%V$9*>qU$%$hX98R+TVtU#GWUsT_Z9Vfy2G+jZw{!* z{}E?mnM~HV3)vtyuqp6q>xJS{G8{25zR)N!?a~X5=HEcm?B>^7b~V2e|&qU7)saDuU(ZESOaxxd|> z!)W<}r0RRZ)g2^ZxEje&$d0C)O%f*Z{IFal$xBr&ztqNPQYLP00`mq}0i%7^wL`ep zR^Y9dzX@fP{9Q&irkLlzU|8l>4QAvTedFcMQK)odkiKTtkHnuk5c4;d(=%z-4dVhb zSf%TC8UkaA(|AFywj_B#Sv1|d-)Pk8S+12tvCXukq7_K{>P}DB89UL>fV-J8I9r!p zCf4a2uFi4(zAjbXvFwl~DzZFG2 ztjZ5B88)J=-*Pq(!~-a;&4;Zp^5dDc8$~CfG|nd8J(zD`Lys3kWO)(S^o0U--c5tc z4e{vqk;3vZJSiC&+r>*zk)BL+nuJuHlj%BPTpoR*PN-q440s26RU)tQx&5rGIpiu^ zPOKs>Hka?Vleq5@@no+`sswj`c|2qBP}zz+fpY0lI#jeJ!XVDa89rrQ@A%^_^Kf>y z%H z)K00wDul(XrPy0G4A^69T0*4kbkHxsn(4>e5GlS4FL<~ zswQFk4f3nA_H>Bu_ndQk%M{v+CuthLaj{3$zpa0iY<_Pp_Tt|jBcjKbpFW>fH7=08 z)dqK4L1}Lhp)@!?@r%h@IQ>jLbHY{l`6L&LkVC8t7! zJfjg>vp=khnf8g+Q_t2i!p$q zj*-T@TKm#CnByvEC99XD3#tv%&_EIMY4z{6&kR})o-)?{Y=raQ6XFWPKZS#+Sj}R; zI~8o|%9WbXW+>EMSTn{1$}QbIXC{k%oKxw#t+eO$_&WA%HL_P=gO-|tzwJCDT+Z%W z!a6^2(Xh7X4Hf|JTQRA2iFjlSLg^nL12$?)+JJHf(MRF7`l8B{_ zFKK4)T@0V@zV+83$N97m)+k~UD-#(%L16};zc)0(;ZcUCl zB(wO(!nOMM{1>oWJ^m9*{l8(i{s#+}=~OK}Gd;!H`kQ{dQDXW|a$0(V>RGb3{!Us- zyl!t#PtVi#+1-*atwpP%bCWYcF=Drw@pp5evH2HSoeA^<Nn)jbi?kK7Mq8@%d)tX0@3=EnAh*CUGDa^~jTEc-D%BMesnNnN?&}7-+f-Cl;?BZY#4i87W08)_|NRL>= zJk|fr-x+@surT)MMW$Xb`m#QemXMQzf^?hz9g9kGK@`8kc92p+|3}W-dyJB~|K*(jzoMq>-OT^5IY0XEIX_D=O4K|1$66kN_(D9X82g@LntaNk z#XoG)cEum^--^UPW5rGBe+l^J#FcIjHF^*8NCNV0i-Y8kw!fY)R)-TSgAKXR0AwujxcY zs2=!QXM=Cxw-=V|*XOg9gG7G(>XZ1z@a+M3ni+|UP2E1 zr)&GlQ1zaR?`=4%L4_f246$UaE`JO^Q6rx0Mt^Ikz4M*KpE&Y1dtW=t0tz#a_>V2J zas#=UM+=Q74GSa1AKiKsG?YIh;#GM4v+3XeLIgo*CX?WjaIFRtsnId`nypCDbYv)&5~19i&n;-zLr#J*et`h`=-2daz z8p*qw0qg;sEJiF$=0DB;Pse=T>8n5Y*!t*ec=>pkfJz#4B?C`7@J$?^btsHprw!+ciwL`0d{BN;qyDOC zYY+5leW2%5pk$u!(tn=~2`TT+Yx(5iQvEoz)SRy&cJHjU|M7S^Aj zEZLWw_a`B;zazP~9n(U##)P3F>vgNEtmgM< zs$k8$x9|iE&bZuPu~|?olLVl%xsg^i_ELy7?&|HMdUt8NBV(A{Alj#g-D)$IMxBr) z`hkwEZaoIJ^?=y?+CK$tv}?w*=YIEm2iq*fVhVqTPR7*mq6XJ2HvEZ>48~5*9WF!~ z>a+1LdzQW8v-)`#mXJS%+BV&z=QZ_1ctX)}f>c*x6fM@G6Kru3N~`D@@6!4Jl*24z zi2~E%Xc#N)X~z)yxB7D`>oArQMMpE<=CQ54HX0YkDUPW&10eZclbpR1Q7-63jJB9`8x_hyCb?ZVH&`|OWk41 z3pi8tR!nt>JDwKAyUnCC<<3}pN)d$VYCHjw&KAp@jV+F@I&Aa~H#RJocekeQBWLJ= z`L=Bl{Nq5aBu#heHksm`(A*Ts>FBH-E1?hvi&uNDNhi%`pX6^7kvy79$Pf9ECU8yw_jHVnYY+^ z6*vVQ(yMFF8{fSLSFk|XO(iOq$3HxCdH{9m$qo0%vf;$&iU}MX-f_$oNHHLbc^<5P z1S+-QRWW{S+fMLb4Groop7EMG;{(t14Qw3aJcRjg;G339bIz{xv7Jt7clRX&Rkc$l zoS5EzksOvLk9%jhPN2ARmp0#ZX9>L>!`nWX&yvfVb4QIgAl!P zw+($|ei73j_uJ<&P?PyseWwoA;XvKN8&u%>Ng!fDy@Ea9V}=TIOu@HZtN%k-KA7<1 z#w$mT@nsDYwzJNM7%J~}gs2!$<&^Q*zXpESVJe!VI37|-7ZhV&qHHGc^Q7K{wrLog z9vzx)Lv7byWoS|IOF0XaaBH_)Zr6EckR-K6n&3|DKBKU0U|Gce8G22!DmONMF3~U{ z9M)_jW`lo;Hp%`A8!R|H(8DWz{II6uh9QyuBHL8eZ;YJG5nbRxT?yOYFuo=1cPEO) zG8TB%4wN8(z1IO+Y$I@_UZ{pCh$kF#$dRAH`e09ki&>z3u~#0y&a-FU-q>K=+#h2* zMi>yViTodCuN{N#*1{qER3SLDl)E<%;CV_q#J)k9yoS zhHwX|Z||i{Q0h`t;(c_tcqK#0rq}`6lHX>3zVF)Zr~a=?-PmP zY)jRr!l9j*lb-NKobsC`__?MP$fgTB{P|}^OQX3m8U4{zX{FIVCzqlbe@edATws7N znbu{OiLox|D|{SSqR@(HhnQq*PW4P(hr1J3!L3-x2kY~P6p*`Q48fnKnC1@T*(j=$ 
[... base85-encoded data of the preceding GIT binary patch omitted ...]

literal 0
HcmV?d00001

diff --git a/core/src/test/resources/indices/bwc/repo-2.0.2.zip b/core/src/test/resources/indices/bwc/repo-2.0.2.zip
new file mode 100644
index 0000000000000000000000000000000000000000..696ffd939d59093e61da55a96898e4431639daf0
GIT binary patch
literal 81369
[... 81369 bytes of base85-encoded zip data omitted ...]
zkf%lE*9{^c$j;@=T+S_YA+)=EUoW%Un;{&VyiqAHlWwFvjlgJr2GNj*K|!CPSbwhi z`N0(-|5IX=K5jNLG-ysC0yLk%LqJhTiU1V9wZseH^*3sMzO)qJXj*9ZOH%wIEb zKi*1*ExS)Mx9EPph2oLNs$#2Nb5p$?mDhV1#&$KUyhQSZ%RO%5>J7Y!kJqyuh+)0b zgzAy~4n`*EC-r} z6=}^hNEZEg0tjTSZYp}Sn5arcXYvN{(-U?heFRK$2Zq&cy-eIM%$MADL@JZ^C$hDw zb!C-)hJ?JQ4;Gtr^KrW!r|_a^*7+*gNS?#My-k(RL1Lg>#CAOv?6OiV9h?4MX764# zI}Kd&cHH7vN@j!1ykz)^3QHKSt$I)djbI(TR1V8Osfl-a5ly>)#gD3ewv23=9&Dhl z8Fg3WQ7R|nnYM?SO+`JsIMZ=LTiMkm_A_s zt_M#MLcQzK@PP1LxbB8-}pGIJf;j&{6gdQ zDZ>0pzaMw?qva!Po9KA7j?X<;}Ep+4sAIC#D@`8{14k6g8#zX7Z?r?SqVl(KF^@MB`zM=@%)erq| zJ0k@Db0y!uFXa1|nY}prA1nFtl9vcV;5!iNkZJ~9t!6t0oetp@_-UvE{Vy*Eb_Ln) zl$4Ds3TchyeAmsaTC&ne2yJg}V(kuWuPx?4oo}S**D6|Jy^$sfan?wZu7Z}Oshwao zC1ZWE3FuHlvoGNohkL|n^k8re6~*{Ipz;OI*{w?TA!Yjv&t*Q3vt!yO3Qs$T|6AnP z0(l74(}^FO3kD}&!=TAwEPY@Hvme2&f|p6x3a3xA zk5(IHlnTXC5zpWj+zJXPm9uU_*QX@edu&!5^4a@q6fC8Xh@#w>Oo($9? z5PpyLc0b?r(iKJnnBaE?G$Cy`I*&t%?QE9iR&)}K=fp~*uywz)nUU)Xs&w>n&S`=s z{A?oG1f`K4uC+VytHi#y;xS(3%@lt1E~b3ZByU};GOYX5W~W(ljsv&5XlmAPp@5ro zmt|b+S8b%%Rh!X%RrxpCxf9r1JO;h}4S012x>~?s3jaaZ>z@*$6K%o8iT_JQH@~VP!B|%8kNo z=DXVEazHo-gQ-$8Uf^SK9!H`^*0%OLtC7BLluJ@u34k%Zkh}5Glq_{L`FB+&?odf@ zqF5RS@v==pUO(T3*BoEueV2AtAoi`o>kJw~;caI>p|pHDYo z8cwHaCdG~SBWa&bbA!{3!Qkhbak@F2bbe5mU!`^3f$2PM8}q4OXVC05@HxJmSuBh*eD>7KNn!ItSytHc70TYfOD(5tsr;!LgAe6~d(asR zahK;g6K%!K&cSU1m8Pj5q9B{$hKD+PB>~*%` ze}fhp_8^*8o2pok+3l=&tyUwmB<>T|_v(JW*cE=-Zge$b4uHN~FJmM$?5!OYNbn+u+lgue)hh$YLcAIEIStSwKmuI(P;yl+v|J~YjTG`7mW_!Vz z+12pF12kJM8(`uI+dQJ#PN-bkofCy|N)O6!0RTS_b{QLJLX50MCfvMIN!q*c%0}iV zpX;j0UJ#ZgD|4BoM7Lm!JBRify=ZfCH{~VdWBsyX?bz*yj{{q$;dYft->sUucg9 zre>c;SVYXzH@uEgo#NrSCHe+mR_>u0%qzoKez1WFHo~j4Xk`Pj z56Fumw;KpxD8pt69}YS&0B~z)8>;t8;if_7$4g*zvlsY4XBR#cgRmOOM(Rv1&nQ2T zSEdb>;%N^C8yVTTp(LP8nT>#<^%SyM#fA#{@C^Q==FqCXVlJybUv4UnOLvsP;Cb%m ztLgKBR6{90ps7aQR{W$Ss5{ibZ2I_61!~nOJP&$n0_fz=XuICkY$Q$L%;4UQ7})iY z!dubh;Jopu9ATIbwZO!&s(EI4QM(nvEC##lLvaFcpp1sV&I?swl z5u@^Gno~*?tQhX%r!x&38qG^~ftU6We3wbIyUJ2d%Bz`B>z=8|nFbq8Xjh74xzR}1 zLV;7TD5MS1A5kpawk$*C4W^GM#+l>gRt9%5*rGS`*lefx!5)p`ncPb+F`8L~M_tax zGjD?Byoq*(lTXtn&LfIP@q~nyV+LfXVPgq#^RsZyS(GNVO*u2v2=!_+yT_tUy2;fb zXO_Y;r~=;_Rdj2moh26=HCQ-iPvh3lH9w=+VKT+?0;ro4EI+&3veA=bF0;@n8EHUV zkBM%r5%?1%}7(-q;-;M*Kdq~ulaj)ow zUkxoe$)e-^{7#G64=WftezApCm|YKP?5z-!6-3U#6Sfz^v0O&m)`XPR2iXFlx#7jM z(G4-=ce@~1fX4hPy{xev7|_^s^HX~EN;?ssz1#piP-4{Xc*9{~-lGnuG}(uD^n)SX z|F>2JnBZ1y5};Z|Heo&FO_Xo{u;P5%pe-Bgl~QTj=ynX)kTwq7qgw8%;$|~z?KF&R z;Q%5HY2Md^VmB>y=&g9gRHYoAn`wu{`OMj@8-MC*r_paz+m98U+1;A?bqf`b;`~Ln zSjsKvo9KIWc?@jG3{IkCQTX|6la_|VEuwFYFp6hT=qCyl@e*GBcxx<;lDboqcT*sT z_6?;`6HyQ=KSmv-jV!NshSLBr`!kJE*N8W|8sQq<56GV@ARN&nCIu9zET_YxsrH-4 zqDbZ_GPyJ)B{tpME65wQ;c!l-GTBDjAmbj({6J=DP;4vU&2hY^{R_=JtamI8J68e& ze%J>YxI(YW{c%DMtyS!+SpB_zK~d!cDoj-S%!ie+q=;{H1_dnZ9cnu%w!mdtH_^dz0SGa=0@EQ$%**v1m@gnSzB2WgG?Ss8B8=4G0AGjo%s}S zj7(won?KjuX?%hZayv?e>(R{LHBfOq<&{mgyzp$ry2tWk(J$ly8WY2+h3<^wUC}km zCNeRkHlo+y&7Nr4a%_M@fXz#cu{2JbS`GS6+)%NumVSJKHJ;W67n>$D8)*97CNTR^ z#hPq2tHqqD7GAKyx@v_R{4=oXKuH^Dipqi5LkWN%YZBkAPEgtmP+r5ScFJ z&Mw^C1BSqmHqf1kOF;T~sB~==e9Gl(28on~fpcoZmY)V(ZLXSjzn>mH=*Xp%Zu#{d zE4KYKfcWeR&G<8|5kp$jkNV)jA5szXw<%Ms_S48Z0T+!w)%(-!8fB0p+(+6!)AX}U z&{3Kt3&nWK_>9HdQSQ?0l3i$quH0&KasL+DVf^d~dlD8=&eOUzpQKEcDdY2qm7+J+ zf07+uW|a?71nQ$luO7b`DrcWP{u=-abTpgdQUCj92LE%5sGk4#w}|t9;!&EVeAoHq z#N9kLHK&c701u50B@e+*ol0S&0ZJ05wC*J5v*AQ+Vu~}LZJ&{m?yM!|*vx8E;{w7C zu!?{U|A=N#jufoJP?nAFCspgpALO-{Ibp>Vntp3%;c_mVS9qEK{oeS;Tb6T@z0o+F z&h4|5p&T~2obYRvdybPqepG>mnhws&vekGHkc}Ht@MsMaqhU$n(-nZ 
z%Q`8!et2f`?c87~AI>^U%Rm?zYGyCxZg`xUWt}`!V6DpR;bbWZR?7VYSHZSGI7f4cs0U>=IVvr>yR9hnQ8D_?MQoxjS+7ts)YPCe)9|Px3)1AogaJMMooZzhYk(lBR|U*2hOV)i zjip5}87g8E<6d;9>Lz!>@}U8N$_mXI!Mqv!YvWZEE;>f_&>z++W|i;DT5*>~+i<$+ z9iN=IYJ5Q-@nbhMSn+eH6byDRyi;wW9SNX_!&_)r563r(ZPa$B6Dbp6rOeOO(B#2d z+TS!%yzCO*$Y15{oYpbOEtE-qzvU+Tz=-FmBAi|E7L;ChqUuw*Riug+{HWTnwxxtQ?iVl`l{{)zg6ArRG7oOb1}5#3pNu26*oCo8NaN2m9md^<;~<0 zZr!ZO0j*aYrn$w{VS~S1r3^e5Y}?Kvnixzkw7q;E-J*P$-DJUAs`#r=)Kdi$pEBI+0uvQIsJK&Q&gsHj0?Y9yL&$GX_E@XpqJRVXG~q|v zs9naTR652P-^uGis~JL%Quage5-03$#xIxi!+XGXYyx`G9Bi;nZqeQbI$*n0fzl zzDO}owz4eDKIoz+YDLRAeOCx}$~v^1zLW(V_4}j$ux1-N@x zgV~8`zT7>WrIRg6>Z8OLu}*1ZN8^;qu=*!hOq^qyPr^b9DdZPupRh{Rg2km&)fR|a zoX~3>RB0#4S{)4z9rQeUtF6#SLuc$Wp^VdfL!sRq_s2om3*8gF{7|PE0vf(dd$vIb zY;85*4*SXXy9C@|z>a3V(*@Z)o)%ujCY8Yy5&0&;A8qkiqnQi~xsWvOE{531%%+2* zUxXmON89ik=K7xTG9}7iI^f2?>Fv)h9iW*2c_~c%x*ltS(@d8c2)3<;bh59A{RV>2a2v&Ux94m)6W{{dSgKT7wkU zHRnLhgln0cpgPWBj#y+`kCrQp1*yvm5a>db7yPGQ}fz zHSygVWk;;*5%OEmJ%97fdG+B8;zd0R>Y*2Z#cflKAOj{oKhVf;tTc;yxwJ{{4XIkaoK>`+mAD7 zLI%FbU~qHA_??A{1wFQNA2@ZEVa}i2&a&^;gdbDH>+LQV?hb19CkJSMP0X99^T`0M zEXx-<|NMmB_&U03neJ46`vnw5%kU_;1U6bR@6_g+KNfc_dq^gAU z{#-Uh`!dmH7NfKY!U%osrSXp)IS29k2nBu!9L7REYXfDehqS&gDw%A}_+mbCG)6Lj z52Z8s6!IkKAZ~*PN&yWD-HwLven)7xqoFXQj1TAvcBE2;!3D`pCTp7I<>y@1jJu2W zbe@&-i4F3g76B|a!)|V)4#gR*E1XH#w22l9&g6|`Id2w^5k>TR!SQM%xm)#7hE*iW zn4&hqd)1*L8W&Xn*+>*@EEl`C^Nd!kWPKV%g4~$sD$ie}NuYa5sAytnG$?irfp%~) zV3>=G;!$auaTi`HKAQ+IP=@oMkEEQ9W_Ww86?=4QvyUO#0R z>@3Ag|E!k@rW7XIn`Q=Rog;4WJF8ri=jE5P%FJ?An>+|+DGe3cW!LP}{PGfop?l#6 zx?5aDTm8UFuO=(a#1e;QgkrfN~Kjyjfl2@bt{f@ zhT?^^5P&ir+)9~1;}y?y^Fx$r2_>|umwi=h1Y4=i^c++ETmubu6fsdX@3s7NA~Y)n zRUr}hzno<_&K9VyZ<^N2ycuj_6ig*+PHGgxB1lZzS!7a16b|=;53A^cE=}xHH9uV& z%Y*zYT1{TAGy6rOQ9Nq;fh0HxOfMV~jqJ-BC4-x=ps-Igy>qwahXg_t>RN74##4k} zQ?e5{J~;-ka22ieHmYKo(MYyJnWFO)sf;l+(#y+rqw@1z+}=}fq|1$Vm_BLOkG)?>DxABEdKsIb9&R58ym{rn8r*Q06e!Hb63aN2pk7A=#_)5YPv zO>&~*d3sIGgw_17KJy`5@<^3FR4iA+)!x9&FIW8hLRgJaQv`N1o~roSPb!06u#%Z` zpo^ZRY`a@j;i)uT;zty&egOJqfnq+*XGJleeOa~KmAAOSaNej}djs~KJGJ{=#oAs{WUQ6 zw3>C4DqpP&+Q!bF#v9QFc+z6$NIw9dpt%MF3gMMyzxis`M;%>~_lv67Z0Ozrm4 zqbv$#S}UcTsB%IC=*C77C=xZ!QZ8PbU|neVMLJ+hrCRP~o?TJ(i`}cXbQ~sb>#_Wv zW-*D*stj( zw^{9?Q>Ko7!}Ovv6gr~loI4RmJgkUCws}Vn%IndA8zplB(^$q)W_Z)PM3+GcdY#R> zLy@h}G*9q`i_SxeJi}zP1kp=aUzy>)s!F*AG62TC)MdV6_?=Z%fx!&@%c|xsHC*KA zp7HeEKuscp=!Hqn~b!9{-5WMXx-89@QO$Vj}{P1}$oQbNqL)8@>IF;x5LiuN2?iU3RqJ*Xe zv>tEGY(gIjQBktfs&cOGq4DK>a2dZiJrVt!{M+YtLRGeH+fe;d;%lVY7#Jy0lN3(+F(rA*4X5ipK`*~F6 z+BRr^hBQ{hXb@e_Cl_-Y4Q@K8Xu?) 
z=1_J}esoouW*x+J%tbf=)JcFqpYX6AfvVbC#Y-pxb}XXRC1(PBD6aw+8_7dmZ$uku z8S~w0ZM2|%pw)V4H(FLg^ZC&MTDc0q7_x?o_>40A)Jk6@0yNj-QfDY-v_gzepr>F< ziZ_T?VFYMKl}spF*~^yl3P3P_NU@zI+>0^h_S4GAS(+x0ufoGqbknV%DJc=I6)8H9 zwWCxnF<^!r8q2g_K=CkM2<-8i!Q_8b%dM&#=}Q32n<_Rfzw(N$R1?6HZDbB<2x^0w zhM!$*luO>cY*)a-{#v=-*GC1by~G>gQr^zuv+!h7EZVTE5$>{`1Ly$hjes(1Z=#Qhy~qW+E_oDfRR^Jb zksSPIY>T#=cvNwmOSto_ZJPRcX|ZiDQYx3#z>1L0b4~k7og`%_Nw!fb(&4N^Q(gt2 z*+y+g;8$DbW|^Osehj7cEwkKnBizUa!$nG?vybjatc$h~@S;euz9IampEjukWagI| z1fg)K57Nps?og2K?hZjfO1g#`xBr%(qmX6Kwc>CMC1R9m7KpLe-xmTIonr3>h>d zPV=|P7REwd=lB9Qdj8KtW}QmjgrF$%8dNEKHi{xaeY-Crnrexz!Ne5I%Muz z#vv#_eipe-b*hG^B>FV0!CyB{#v?LXn*~IeY8%F0zij$ITYU6Z9!YF!6j|RFp8<}) z(z?pwhvNiB(lPs4b{%I^Z2FPJgouQ^h~6cWfnFjnWzL2z=`zK%>f*3RcpSoH5&_&r z)ydIFS$8un&rL)lstB?wUvUjK>vB#$+RdtOUE|>(X{}L1AFADo=57pVZjr^-jxhJM z8(F7=Q^!0M9cgH^*A0x!{&MbfvczT!nXRg?dX{t#0Zy^Hw~Li7t9yv{Xd5#QmvKBU zVw`pQK1Ld>mQn*g%QRteAAdFarH5o1@MD#Nn#cO0Vjs!j=E{I}=g@-G(NbG@C+Y*t z)oLW?ayHC z*@0DGM3`8W-j3QF*Us#$e940~oZ;2i$MGPpJyL)}ZTq_;#+58wSQvuVlXk4{TavvAUI<>|v<2y8 zZ!Wu{F#Ejl`r0-&qn)N{hz?&Q4M%rc=c95qVG_m^_!h%QyJ2LE`gN(>Is-=7nZ=+w~Ui3(Z}A z_iQz_6ger6r`iqqH7PY`q0gZ6F4;6Au6ow4FBW_8_FImwHq}_Cp}isu^&xZ=vx4x^5M15XG!9h*m z82A7bG;PjAB$?yTMO>ks4o>`*YFhpI=sjJ0y+etzcsCS7ys)ZMZZ;?DbOEw=itGSr z;sZ`i{%O|5pyN?%wgp|Vfv>2e^!P;~A#rV+I43}@tl@E5QK}!_vjIB=(3}#Eyz-T# zdOY{z^d5L%P+nyJ^7%|F#q8>;)7M;^ z>;ko5n!Lk2lzEn;Nl7~N`Y{#u}O8oR_+;%&zIz$t?7(Z*j zv*68-uAgA4Jo&ymh=&X?$eSTHiI9Hs6!mQTzGH^O2QFY+(HkKu$oZgdE`*3L5+H+> zy_JW0da!rAZw)T5MaaiOxAT1g?K@j(TW1kYQarOIohCH&br7XnbTE!NP5GfTcE-fI z8`@VA9(zWBnDgIb>W-^Iy-PQg)cwmK07zZwSYpRqL7KF1g2508l>a$VB+ zLUW2RYZ&zD=#%-u`V>mSo&~+&b9%B0^Fy)aIoLr~-scyhZsFlr6*-1XU<7zpKxCR* zQAraGA^-G+gZ?$R=yY*-O9LtZ;W9?vV?xVkch6P5cWm$IOlm^Fe{AK#_Psxk2|p1F zSFp-68!^J9VG~>h#ayS*4|zQrrbs}?SMQ2xM_h)Oy{&@^f%{z#vLt<{yv!dUs3%MZ zzEb08fR9uQbsO9K*1x~XfLZlW&r{I|i<%;+?gcNm4jbh_2ucvfSI{AKwl89s-OF)H zH`I2&1p^*vWClv5^Y~yP$GHM{*=788JYun(*KqnFBCq4h&{`DHD#AtmTSpJoLPefz z>%TsL@{c^Z|AL{nd0{tOTEIy!8jGt75AKsaTLYCqdw#857Ax9&;vt}8^KQnl*IVlu zc7O7m*O~xUN}GatfaGCxl<|yoBpyh$8w3iE>~SZ)zZur)@`fu8;14x4zfg%ggIugPEvq#ECIv z7gHdlkU|kdIRNc!nJsKgER2DUKi+|EjBHFSOl-_7%=-FHHwCkQwl&^ z;v4pPBh%+#5LTS=PQR;WfZ>5$0W4vhK8uLxwv#y7Z(D4_ARl|eLE$isg<@1?7@D8X!eY17@FpA3Fj+TyY_FU+&=6P!jH%) zGtFYid5Y_V!XebhXTl~GZ-_C=ASwz<5!WmE1$dadS2b0Yrdy@S=M7rA&JTL-&z#IR zpO{8no03djYrP))RI2%^E{S8jSk`Bf8GpeiOS7HhP||M8pfo=&?ggLuUIMO$`4Vd6%KDvvQgE5E||D$_IY2tXfl z+%pjKwSz7+g4Y5$S&w#*xc+EoLO5VDQ<|&Xt|D0P&Ca=J7@Bcd|1*~-wM15d^foKZ z8ga$1aYe%^8Dw2$Ps>Pynlr#k`!p=o+xJlf@5{BNgJlF|3{3X!zekn=q51^1b-R8E zLLBdO3onmd-q7*P{Br3`Bo_Zl!hS;BmS9C!7C2S@(Nf=O!Cs5hk-Y1i8mCuK_F-XZ zVM>Oa+3^ku{%jJ-C*ki?e%k5`#Il|YpdBY4p!(bC_A7;^$Anj?V!nJ{Ccs-SnZJdc(fp?@^5 zJkFU{-XAfr9H!4J0i!>Cg}X=Z`Dr@AJpPWxZTSl$UxM0UE3vk zLE}K|$Z?;&w0j`Bq~w)IFZNN`*9EH6chc?|oM*{8W7Rsx&zZ>9P)ZXX8dAs zG<(DV2Yt`wO7oEqkR_D1y!oZxaayEbB;Yj)2a7!*BZ$m+7Wn=3*9Oq+>5_t(yb9h+ zJhaLWcHPiPT;8Ql(C3U6O)rp&WYX>>)r9CwJCQ%X7#q?*yA&nAz!aLc`LvffqVN*t zXr_S2$KNuLFf6g>RXZ`6e03zK?)S_Y%Qg>cJ zCu_wyf~*~r8_t|1$R_9dgGlO49-}{P%|jvttD8N4A4q3qJ1mu8+um4T~_x z!jdZRP*5uvP*9XNhsFO6k}NmHmkG@|+Zk1W?I@S5@Ox?cMZf!@h=>7%)Anh3al_`= z(k%G-fNGdAo$l7FD;ckJzFk-`|CfGl23E_4Vn*VKFVVAE6|k|emeWsIuVD4ujX(i? 
zf^X6Xd!rLoWx7-xh%Q_Q1+UT<1UKeMF9)8R!_0`mmP7_iOG#l#XJimBxIdYE9ALZG z3_`}$T6SN`4QdSJRAzCn?>unk46Ye__!j5MfTGTGebpHA#e%M)eE&HUp)a}v-=YUT z))iYZW#$fP_F%iIfj=)8Cq>1|n(14H_DC_*gaVMK=aFl7z=y(QxMtq?{0@#Q@6YP= zR5RV9CJUSlpY~W&YCFs(12r%dI5g~MgI16-)~Cjn zjPc^g2>3Jxm7m8ncJK6)%#rO&ymGNqw3FnVpu;;eq6kbcpwF?NwlZ>#6+mvcNa-7K zip==Jfo%Ask}!A^MxOP7N4Csb*r7RLJthzk$%A8;;8uOnk+DKqR6y~xgR{E^j@gVC zD@A8nU4KPGf2sa6^ON)oW9sG%YoB9lPqUa+5uM@#XILHex9fp)AFI9Q>>7L*pj7lJ zlSg{3aAM;uBbbu3ykqWD)U>}6K58?{$d(5;ld@F=_itYqTsdBK2g7P%4j1o?d{$Cy zRVIy`iQ|Lrjq@>yj1wqF!Lyd0wqC$-~;`o8& zu1f0J{lWnWmaOvNCltMOek*VE?`J3VkZmcqMtZml$Gq&ALuIa<-fiHE3~87JwQ@x_ zj3yoV^-r|Y@kBRFg1jBb9a}1}P)uwNQz{l(+#Mzz;fYcsJLun0Mt`TpT+ZIRG<_OJ zsxva&5vm8&@6rZzCU@)Ur|LW6*=kHLd+iwe^#N#832EzM)6c8>tg9FU>E87fpllO4 zPoA{wtMcaNf$AqF49=IKK3qr)eO0V;rMXUB69iFch;|1~eo&wfzfZGqZ9U-%ewx0Y zLLLjBnpf1s2y-ar4jJc9r;ef!oP{7|^to&0o+MUVxWJVe?48O)aB2)Y#7rB66epz}_oB8I&tT%oE ziY^J4AT5p(#uahnrNG%osy2kul*HmUJ*(yJ*-U@3tIpWTcRnW{>r(<0 zHdUQ=Vyc$!8V`A_T~xh>(b^Jnj#JU?6|&1vDlN&s=U}uvAA*J_nqNF(`5}gT2R{hr z@oVI073TndV!ghz5U!eh~i%m9KDb3Y!@ackFUYw9K2FdKbwCzb0`6@}Lw>gbrjLjVdOZHa7 z#OqOz1&kCrjP`Z3K`CF!nI+285FRS;SdIGy&r}YBa68?U1mEekX|LWN*OGMtsB&_MZIU1BMs!zUbP=yhgJU)6y2;o0X@ez%h3sqklKlts) z#l|7CnX6Qu4Y^CiR|Q25c0!CxloCwY+f(M!ZyeB0BrSx^ssg%QOkE;nA z_oV5yiYppUrY!ZF>!)6D9BRqr-I#SexiS(|7mJ({B|Mapl5+6ONXBn-9>ufJnsZWY;i7q+`@28@$MN!tjI;e1(* zAUJ{*-HKF5;;6B(CDiJ^eRk;&PTMl4mr42pTDF!lhyC;{aiPXH*Ml&j={zG3x>urA zu(NSZs?+Z^tj3AJBI0fWEU|9Ebj_Z~&W=DWj^5#eMW{$j{7+;23ZG-Xl@Ux1e?HFnl9 zUqB+iGlzq-j-{r48Y4;4gMf$4scxVl$HcD)FKfoHd~+#l7OQPf%KT+M5eL=yD}>VW z{nax!*C5Fd^O1yVv8lrQHI~q#3kl$QqcfRQ@V=3K4A49=e_DaM(!O~nz04}Xn9kXy z@qIjK$2^7w{A!t>PU5ixzX$Sh7V;_qi6&c$Upb>kH7bzhO{Tt^nx1KS86GEo=sDuUv!ni=eC)CpOldO9A4Am%d{0h;)QOWVq(<$<|t z_M8CTwP)*k(^X>;j9vAepC%U=fn-aOg&0daK4WH4L^*EeP539ABf7`urP<^Ag37_2 zQlj5Bc6}t2vtsEgD(aX^bPCxfR1-e#jb8OC&~YCahM`k4*=;{+P?Q?a@yhjr-YDVK zEeR}n6cOXrl`^}UkK0zNHtwrn(QlIGW*Rib1=_mo{+vk=pQ74+4%3vb_a@OlA%6ZL zh3zpR>qtpw7P}4wZww=~F;PRbDjn)5ZS)*5fD-QDV>WW{n2*o_NMeFQ_2}rFA(htY zaH*2?gVEgE>^E#E?VnIrPZhs>J{j@Rz1rJ3K%}SEOmHr>evd*wX{U)-r76wLHp!;_ za3%vwvIxu8v+q)3Tf=h4Qqz?ad~yDrF;DqjcW@$zM2K9>rllBwdFanX*)v^NE4YoL?i9mBJ3WSe3q3Sa0j@r z|DHG%4I`alw#o$C7ilr@@FRz1D*OkvK>or~fYzrN;bjsxqw%8ONHwW$1+6M8-fR0S6wk1;E zW2`|(_K}Y;K{asw#qdkOSn<;O@pZe!g3m%53?P?FEe%&Wc@PMbhE7}fY1-4$(kha9 zE>hu^g2^_$DDU5tzXV|V&Hzz-5TpS9NBRFhijHcNgB8ViKO}~s51y9`Bz}m2OvL#T zqZ&yxgzLkqi;?=tRb>;+eIuX)9!q;iQe_|eD)f#QJejB&(f*+P3ZU~#YO-xB?^!&* zwh-Z>0oXn*V&!sIA!RKumP(FP$pZ5p8FNN>jcC`h=9dyu&?}dgnZ5B;ZpP;N)=ry^ zw{eynQgC%CiEGs*9XvElG6Wt5Y!C3HpT*^B)3LKi)mZLjYge(q@TP9hD(Gr7kqmp7 zy3dBR!vo0ABz}taU)je>DHM5s2rugZhu+?1l^l z4R-`F6dVBZn;$8)LFKe0O{KIL{SHX>%sjuh>%4^I;nxygW>)Tr8tR&Q>iNnlhMGbi zc41~AcF90qaaS#EQ$0CT2g?I=W^$?;v5zuCB14;I^7gVuvc=kVg;iA`J3E?a4OJ}t z*yPmWzQP!)r`zMh87WEr)QinUP1J4FtTe4WRBveWPlp4RM@_P_yDakUvX86Z^{0(e zW+@2!TnlBfp25Qc#z{;M5g=JPXc$shBuITBErqoDpM!s2;GyvUJ;H$675#DePl8cR z6&?!qFAe@(6i6#9_=kWW1?gW=5amGS`3e7zsDCcM?;@}Lx2mE3Qnl2d`ERhJ50O6w?0y zeyBL~W`MpvB$BdKVI1~USsTK@hGc|) zZHENpn;RKycV(2AC=6&or16sf&NzWwecZ@kzbm7*RH{%GVhgQ5>`?&eVmC54?#d{; zO5;0($OrwQD{16g7@T)y^io|st^2`%*yLAx#6j})H@1W8t_)YY(}s44=0BnT&d5Q% zg~5GS#`3DB#6ATS6rb|%j3(?`7(91nq`K4`{9RWtT(>ZI@5*2-FKU8?*n;H`dtgGk z|Bd!Ayek7cTz3xUuM8`mTNp-nWlVte1gar^8v(KKuYUEO_ZEinT^YwnTqLy+-{t=! 
zUX2Rf!Z5ijqbW<2`0w^ugq+)MvZB zw=m4^%5dIawU>ePaT(0tZN4FX3&Z@b42p&oJO+qgDf}~=_+!BF-;X@M;HS4REbhwK znl)Z8z6S;6hV^?pkTq{%Sl*T495(9y!*~CTyQuoNFhF-@(E3YhnutI_#ajQ~4kCkF z7*=;>44V>R|Gj^F%({hPeOCsF$^aJVbu*VS7;_wywH z^v!V>vX<*^p6?)M+H1(~p`iW?W2pWi literal 0 HcmV?d00001 diff --git a/core/src/test/resources/indices/bwc/repo-2.1.1.zip b/core/src/test/resources/indices/bwc/repo-2.1.1.zip new file mode 100644 index 0000000000000000000000000000000000000000..3253da62c3fdc29d811071ce11c696aba20745c4 GIT binary patch literal 70133 zcmbTc18^_R+U^}&D|WJC+csA4k8RtwlNH;xZQHhObH&N`Jo~&~y=TAmeS7b7rmAPA zx~8wX`ktw~r+?jYQlMZkKz|<;@p0n+xcRRWI1nz7gRz}0y|NNC5Gd}ogUa8<*$oy5 z80-lc2nY=IuLtG+skx;_L@4H8wSO0w|F^lhjghh2ztZp|afu!6*P6ajuA zER>{_N0La&JmB<(sC&>*SKF^XCO4c7Fs>z6CLJA)$C)>_>m&Tr*V~?d?w-b8 z#|92gZuqsbvwJ>o_xB&~5BJ|%T6KK8b-O;^kLNl{atJbc+u9%A+4w%sIW$e`bKEUH_)((oaU=H`zSvY_Pl=md12rW2)_@T>O$nAS#uUnZy%ZKC} z(vNwMxBP<+?Ud3JJ5>jL< zX}`laFWL_#3f#7bovt2NmUdyc4lBN;6}}@>m<{~GLz|x!Y^jPArbb#oIlEssn}aiv zYb7EOsEi$=Ek@*5AKtRwIL?$oRI4H(msg10z4x^>@I@C>f6P+${G|iU(KDIV(1Q@o zbtDaJJ|UN3Z7Q~*6RSdzIXA&lj|(bl$@U11?Y|>k{2NA0PZ{U8P${{) zPZ6a>m-5Yoa|qdZtPqlB0;<`;wgxzC#(S4KxhN5lZR$?1YGeD>@fgtPzhHs=sq=c$ zn)?E=E{?xs-J7jzv&@*-9}iAwHTU86AP@d9J>1bcjS8S-=fBC3#+f;%5^dXVLz4U3 z^5AM#8xGehS^XTuFAt#&1^u`V~MiB5tRY_ z6sEBGiRCJ=-_S#2D4DtX-svtD^I?n=BzU^)4xI3o=JOi^=_N70V7ar{>~f=CBye_n z><&hKa<>5PS3V`e$E#3wqEI8Vu8lfPwEa!8D5i9@3-|;S+w8p0cHK}>djgyX;?bO$ zyhA*4CN>bx`#UANxO{!zGlusB;g5FK3O*aZjep2eLZ4|bqBt1M+%l66dQ4txy zZ)1fTkqG`D}uk1_kU-z`*TOizXun!zskSOW{m$Tn>F-g z6Vsh5re~(7SXf-9=q74jW@yBx#;e7T0)Y`s%f@`B7$je?sB(|!x8HI@s6bGZ?)lYG zeSrdh14oU~>k%~!(RBZ>aqGYB;Ulc0zWrsb@vnsYZ|yPtTYHE9Wp6h*Ej?jBEhSlZ zvbVSQW$*HF)tAzwP1do=5w{q&$8gsmhJKo!_LnU9IMYWc2*mPB{CI$p7?~J)DpG`% zfW?p1O23c&_{q*kgO-o{%|YZY7KS0<23dsSNm71J8paY9v`{XsVYHOuB4ZVXeKyEq zKhka%=HTDIe_Wf95ot?|nnb_UFXr!0e(;#+x%I)5FYAJBO{FE|q#(dORl&(%ok=V*n4jzyl z6(cy!VID1MS8ZR|5!nk?bn`Nw5*k<&_e#Sr9>cPu(>J?&Bw8s+6a zc+;;XNa`>&>!#+>@?daqDQC+c;MSDkJt=%I{4d^Ld|3~?Cte&M9rOpQhf5<;1RiAy zNcds%8s7S*>5Fu2y7pe8?;xk8_Fnfw&%d91!Hp42 zw5Jy%lVlw|XOvVIwn%Boe-g21stPDA90jb%>ecXo08c6EaQqp>qG=l3 zUR#^(sM}a{@sIWv-w+U}4vtt_ZJ8WM3&w3bVoFwuZCgviB^i_;t^I+?!Y?2Bw)NqF z$b_J1A)8Xw*=!>_{b*4IvrHnQd~@iGz9qE_cca}WCbw>S$quT2=7OfktasFy2agqo zu)ADlM$9AF^{mdOf<3qz=;>v@0&+SU_@nfKx74vP!Xo)xirF?4`WQJC$HB)^*jfc* zoyVNLItV+FphgPI&w^x0vVB&Cb;Y_w)nStf&{B1rlgUXogOSjsTus-!%^akK7TXX4XN&{-`4FGg~> z3q4jajcI>PT?5^bvAX1na|rOq45tGFCx*g6X>ywyL#ZSXPbYP=859$USg@~s--1u` z${_Aw3!V7~Ew_cTrH1iGrdr+AcN+hjaXBWj~m`@ko#MJxkm>G__N=YonSp z$>muPVO00Hx!JV3%SoDne3XcBuy)7-fwNQ2OzH?`{_4EIP{@#hlh?U&THc}V)2Fl; z3fC=S7&RgPh>$8-EAeOh; zL7Htm6&}wZH*}Rbr#uuF9Wj}FJ8+PhPRX!L#Q5gMDYGkOmcmSFPzzO~qivvV4wQjP zZ;AGVAYOCdusz|KIp=rCjQVc*q6Sejmirx)dPf<+gIrhz0u>!wLXtQ4ZE zz#Wd^!7VCUO*>^M#^yvSusBPNL|5W^I~?1rW4||gU_}jKx6d}VlUg16p{*63RbP8Cusq>7z0+B%~_!BCns`g+z?F2WM}c*KNgx1PXca8G9d4c1u)3 z$F?aO%3G&w6f}rJ+06|o`(9R}y2jWHCu6tYGd3;iPKscY2Ueg_P#6Pi}gF5zyGF~JixLtkH@Mydlk$I_qKT(bJrML0Mx}+Gc zvG{NGI=1iHyLypQ@>0P1KdGljUDWEYU)V;eQg>lQh{>|nmn;PqCKAkR3bSsvIeIVl zaN1>gDuM^7?nc^&8Ae=*MKlX)8kH#;uh#566b>o-lt;ComQE#klW(RBWljWp5H3ZR zktyx>M)0T0z}0B(%L!f=B#GgWstQo~BGo~0SH@C~ijqTeD4hb)y|Xeha@0SU9K*Sq zx7tH7Xl(oSvR)w_zP+rUwHwzSGkbSkJPbZq;3qs%aZIOCVaJ5DEpH0jdN|57Pi9pt z9Yacqj~hrSoU_#D5)Ww*N17_>)zCj@@s)oP!K$p6N=2bx4BDj8n2W`e@K7wqK|)~3 zQSh%DB_u6V8LJ0bWLGq<$ObS6>RlF{09w7Ei}dQ$%7!Hwz5Yyf$s1{7uxLt#S5YZg z2FT`4m*x6+p!|VYW07*vU%AX)yR&9F5)=OkRmrCcEW1 zFFb#C(qz-~6lAvzOWV=07<o0@Mn6*eDT<72Z*o6XKp1Vgp z!5D)-1c?yofo-5XgLLrFi$W|?!wie(S(o!SM%Nu|7+SyEOdaMj;K7Y;&N13~R%h!W zbA9T7uwSct_)xHNUTy~}(kOAwD>sY9dv$)cKG-+SRC+L)w2u72WAsiBTI-bg2Ep+w 
z0Mr?!*r%5(0}oa&&nV>)0=D#NesDMu4QHcHBsBGoCpZ{n87)kww+S_HZIwBBxnp;< zspu_@0}9PHBMtX{U~9F9s1`D1sLV;(j7_pRvm)}UDStImlbDQPbkJXe{&w!&TsB7c;NJNZEqlcE8b28;nn<51j)eueIdBdi*T=yAeCG)q7 zt#>Ml9srmIlC9m05gaaYVN-Im(l$jckuqQZYETz>%cRF6=U^^ngVScK*`=gLPwTr2hpx)@1W^ibvYr~n)TYUt$Df)vF2ZAX4`}cPE>Fgi1)pfza1 zw9yQLWH0!LFCzA?LYP;nZd@Q&**s8s{8)Hut({iACXo(;gwrAfsSYqcGepe5fhN-` zg#(CkXHO`A7NIa*^QI0Q<5Xjdi<(U%%K})@(AmHlXZNJYXfqo_GC;NBil;qOaoguh z*Q2#i1>;QdWT|;VCfoO@mxI$t&K(DmjZrcY5z8%207zHC)9enObga(&S76H##GszF zXi>+~IxI=c@-z$QcZ$*ponp}^F1%^h+UwgHcsg?UDy$rd?(pF2z}+_e?0I2pM;NXt zSXKhpxtEw{4V63Up3!)X>!rFa=UO5Z z8=#2HIE^g} zY_liIEO&?>maW?>8U%I&f7O)_O{mJvWvgO>mSe8d!*zc>> zk$+I{)Wu3k{0euy!M+z}d>hvI zgdf(7F$Cv;K`JO?<$4oR6^=g12;epJqqP)wsNXz-$jZabSd%yyEOid@NMej#Gi{L3 zWrY>uLyfLrf%qVFwq{e)W%R=rC#i~V>mZ$zbF&X)=lIG;dwMAkQYY^)v<*s5o3GaN zXSE)V!Gp(HHm#_Y5F4Sm6kYDt`)WVaI4-~{_+?f|Bzrb1f-2bfvyF(28>WDmz$v*--q{JP72JPu^=-tMiw`nchQ_1%kGKYnK`- zD23*xKN>+}M<=gz+RFZbVhV6}gpTs!u0j6MxjR(V6LcJSNw^gwpar5q<`?@E7?;w= z-sF$))@&XZS~&RZ3@&7>zK+&51l5Zd3qmeBoap``fiPUxi`xaTJxtdIc{MT8aHC<--Vj!g{FcLMZ5L4Ur?-mLJ2vr2`DH6g9{?8K2uN`!+WzO-zaBQe_cJFl3) zHN-9TIitvD)SnwK@2KS6GFU|wvaGPWohyV+V|jlBxLCItq=U`cR!}GY!Kc|#%gxpf zF%p`}hxQaRN2f73VPEs|N3=gDhZhUn7F|M-niBcxtT~I_;h=jit&0n z=y^N#_5CUWusGDwe~hpAUT?e)uy?z>z-}S>o`-h7eWi5cygZhy z-E8^XZ_Vt`HgDN_bl)!9eqP)L>b`0SS$c_o9o&4bw3fer-fVRTdp)sxecz_h_iTO4 z)_Cuh1g#wH5bu1_EWSTA+J4AiPLL${o;c5Xc{qdnTyDMWea?J+p6I*=Ph0jV${c>= z-F$zHBsjgH`1-tG>3U5qKMA+fe}nz`+{yB}lp;8@_=`H;$83K!oaTJ3&iKvU#dMxw zgMWqCed%9hEuM709Qe{!PWrwR*rIy#fBtD*TsP~v+a~aG__X-i`M$2Y>3q04v;5}B z`dEPcI=tBNl8>d2&v{!$#25H}O};+*2LEU1bTpw|k?CI z&Ho{Q_nR_6rlb5dNj*IVhyg&JQ~aVCo18IZV`4cDjz~#1DGm!JQ$fW}QB8(aR^jR} z?RYIhKgMz6!kf{9~mT&!te6M|I2+}WLvf_@IQYy>iVbG zLe4-yY5D&FyJG)W?!7-DGtrh_M`C8u_)a$4OsOZbwmO96*M_!^WOD=s(qE!}qgt|T zRWlmYl!j@`%hOO25jLk97yzX6hjoDawgX5F1b@^PfAY_0-M@Q(lJ5BG)RaD7E;?pE z@Z6MSGIR6!x~a~vM{j)h5+g`scMxztpMoN$+MoJ|64*^f65u*sJ`&)*gNn+p4wqEF zP4gp4__8>U{RsthmwyTye|Z&CJt&Jd`6T1NUt8ZyYB8B_cUW$_r~6d3*{_fu#CY3J z^#N!l%k0U%o`#NbO!Dp`SRrpTzAm!Tq(%t7{;JB+iO2sMO2<*9!}Bd~IMTy)=g6vc zn(usGUzyS1JFBE;>tVJ$bJKdrN}4E{uPJG9YA`Q}?Q4y6i!N%0Jk2si0d`iwd}hmsrbF>oNp zUG+?IN7>@6I$Rp-0`?&-Jgfc=YvW9NXn^(e#X|H9wjnLtu}NWON6}&rc?ht~FvOX` z$()3*wIem7FDxj>z)w(y4Jr@n8eU(Kl!x>^w4+)X059_5D0OeR^igmgdPC{f?3ni1 z&4d)(fLeoDg%CW1986He)sUEd3R=Ydo=i5vylKwEyj8wTaCnfmsbcKy-dbf5W6C@B z9-b0q>SGrfz5;#gb#tI>$H)Q~%wesyom!O2%P~Aa&z}*TLDqo!4q8P1T{K*y^5JEw zeRp?k0`SL|j(1#>3ouPM?e)-F>U83L!o8Ttk5s=%^5>99OWCP>JryiM?<(7H`?f14 z+E(`B0dnlFl9yspxSHNsJYshHxRpq1!VXPMO8ey+DK%5bUSqbNqr|3YZ#)08Hi;Nq zd`nRFr}`J*gLw#cb*hMn4`5Lq1h~a3;W0FT%v|g zSMi>=I5(&ugXoD{#Up+a=MB~Jwgzvj4vL}^|DfhlxM?b@B9_Fd{X$(xra5K(4ufZc zr&9FBl_?XyJZ{*FN-@r)qG$PAMsA$*JiBV2tzMs7i6 zQziS>&S4C;kC3(!9A+e5(elDbWS!svHk@jbA)yKNGV3qi*5LsHC zKRgMhN~sTR5R@ZcsUBj%!eA@%GmN956~9N|`E&etR5c^BCQ8Q!K*uu$*Qfg!p|Kri z9wVOb7IQ3TAp39lOg-1o+H-UZ7_f3}12Bp( zAVPA4)Ut?`EApQp07|VkuYG24vPGzEM&#PL9ow_Y?-Y;$hdzo2#7g|0p+;XQHaSD* zeSKsEWp864ff|1F;E(s)z~UDO)w;H7#`-xR`&Ci}3MH-t1#2E%$0zF1*Bgy1Wpj zbX1L-oN$yi^J_h|o~2}PtKNoqtNWj?lxkEM+&6Vn$V|WBA7_{*Do1wa2LWhpI{xFY zTFe(i+9pRY=&!`8aj@%m4=3pummy!B0A-7{HR&l#$ENLl<1xnWLJ?!ligE)#-#ln1 z89hyyC&cLwaF!{07WJjn&60(3j2bEWw`ek9;W&0>^@yjZ=Du9}9F5lmYV+D`Q|STY zYsRjZns(fa+?^Z7G&Y=taa70kk3&Qxj{G24OnPX*A2b7IlG7PEDJ7VGGJ zgoVLtp=dUBS}r-o6v|7IrtIM8mpg`@)_Ii|X6@Jti__bTJK31stax3u!bn-Up`8i` zA`Eb@C)3kdtKoiRfdcjmC18u zg-S-FVbm|)DfN7%lX9yns)o&5(m>w)`10Lx4N0;C+$wi7((n^MvYAYpyhpe6HD1uBvfm?R$CYUefTl?tJ}Ae3dRe zPsQ%7V8~T~9+j=F$DDua<}AR+t2iRryO3icNrOA#cY#@9c}0>n_YsM(;J5h<6O%-)$T$y+cW|C`%5dqnC z%=#tDqDLd#@&U&+S=^eIXa1({K(Q~{eg7qOBE}c} 
zI8N2amdyTlAx{JaeOQ6IAF9N(99$$8!WMTR5P~C*3AZ+AbA;($aLvyquc&_00g6co zbg_mcd(mc8?>KtJ$~(%85FJF*1EN}=0bt!}k^HVQX8UzyV~TPANUWdoUwnjcA1j1D zgM}BAIv^ifP_xu_L{_Daz?GhcW%D`t8{QwvSQvXYDu{HKqy&QbCZNCRjHa2434`u` z(AVWLd_gtarWy3W$wVp^ppG2XAe}6+SQ5gusJ{NTux$s6yn_imY9-2QpenA~d#E%bD%xX#5M5}qXH z#G!q?=JNd=2al6dOkhB_UFJS?!BF1{&<%=i7%2~wH~dYc!(L91YYCU7#@@?GUS2Q8 z;+3&%r23eiqb-HZZN|0Y_oZW`vt0AWS*$~=VPq$9mrxw~b%!ED?>U4m0TU_2gvPEV zrf4fpl&*^Di%RxjG~eWQE?JrFQ|bBAL!d8r)RzuF{wnu_8W=U<*5XwcF0)DWnW5I_ z&JoASx%Akpu(gJX5%8qQMOM55Kv?3y&d)eSg5zl1M(^T>F2V7UP}Q+oeYzwf^C@}V z6LFe_=kobRI)+<;mzWw_urIkZI1cVB*yEItbkw0Py_K!#wFYQy-HOAr^L z1Z}5zhFh4}2J`EJeuJ}V&dPY)HZl#gl`9lqNrl`96@SVHlLd#RB!+h$rsaW9SJ^ zL67VA7BM+5)28#hUqHjJKAQT`Z2hw4)c`WDnkBCEZedIq=yXS*?!z-Zc)%}m+E4+j z(ur#I;bZPR2ku)tXkL7|cnagf`|Uh5k+OeEIVd)=a1kj#v`~yoJ3EJ$?)?+!MsO+6 zkoe>@a>r1;A#wYbhR(+N?!jfXo-~{h(?F}!fHDm68bMXVTJ#2f<~Wn=K_A`2#un%M zN|vb;>_On?M`bz(EIei9Se~AlwCzS!AKlcja&Gw%V?cDOS(srELEKKD`EI0g1$kMm zp3mdYK?bg&LQkA_jBULH-h3(bDfF=X&BDMC_Ood}jTrc>jpojN{d;T3&_VpldHv)M z31A+9X2McWml6Wdgn8Ccex^3~`Q&*cwH|_guWLQ@w^ux&;9OweE;Wm9Sh*kQ~~x8E(JU z3Kfp_>(A}o3E2vEttGnU)O*BM$NM}GoOzt>g?@U2`XQC0zvck=_|z}cAzmOZryXor zIsHksgaP4Nmg`S;&fEmBPG1m}8+q712BdWMNdXfYM{Au6SGkUfZ{VJaIlHg#yGfd?2 z-q(F#u4PaC?uVky%AcpApThP`t1_pB_E6;p3 zWxSd*Z2lbQtbK6&#Nu7PO6SBptPzsht{xjHI@dA~SWA-G>O(CUEDa521<+;;#-5ov z?QLnn!sl%4E0fLal{+#9R##wTIMct^9gxSe@NY_$REsD&F(#C>FrNtO%NQZiQrzQJ z++!nFda1%5^0oG#MHeSW%1u~jDec=*>jc;%A`D%2$HwvKoQ2JB9WiAl>WrQi?kIuX zfCU;3M}1cFf$*j|pqu>&rCTg~TD*q@2LSW*vak+INFN4+yR`x?5om)QDQ%DuNJR3|wnsP`WCYtW$;L zFRVnThrd~%vwGUiqpkp=vJBJ#=J##~db|N1*-A%P9cYjL3umI2UI4lR5|N255HKI{ zJ+MR!H-LMqhwXXiTWa=B;!GH5Iz`~0BlV@_CXdw}SVZQWf)C}beksRRbuIMrx)VhjEL(%NP;~&o*tWn ztQ*|Kl!#bNf8gOjdDIuLaQJz_D72W%4PDEFrgI?Vfd^r6%O;g}&hQ|HJ4GxCtF zUWJ#_gNz?a%m}G5C4`#^hfnjKs@M=~WWc@zX_XmLG5XdXSZ!+CHg16UDphDvh zl{@P-wcBL6O8Ug9E4C)8RB*W|SgsT|rW zSn|$5gJ)f_oQZoPQ}$|u8V7RhvX3t4KhqnDpyU&MoxcrrAmHC@G`HC< zhOI_6v_Td=89sJ>lo=j$S_BsW7VySA-Edu4Egm&=9u>v2A+4LT80?u9L;ar4!u(H@;0pMhL z+6?^)ZcAi+nKofjr-Sb0L?t+~ytoR24ezJ-N}UOF;RzlCyc(~(={)$2>7N5+Gya!w zqR)(;hhJzOJdGrw=6uJf2yewUhALp8&SB9oPhgWJYdAc`$(@bJo4Cr>Ky!xWDOvh& zdsxK*(M@}Aq1;$W!mMqV-)%Ix8SJ&Pb>=1a*h+MAn7=Q;J4tXMY68nUSHz8QNNU0?*Ttj}M{Az;Jo)(Z^E!i_h z9lR{}@`vmjDEymn@+?&OwT0_|HDd8BO?&GusyQf-SjH`Q%ua@9>$(I-KSEaFD!39~ z*COm$o4*THJeD7OAm&3^6rWP*#3*TTP;^qN7|Z92`?PI8G}-Y7bqto=5iRR*&t80b zM82T1Q`4xhZ#XoKEloHA z%5Sd7F@6oV@QHOQU8i4@6y{L@v%?t7b=z?>f^AyMCMyTc*uE2f3L%)^?FI;gQI4&x zxQL$sBDb!kknk`9)uBB+pD$MNy^D1#rs4M%pxckiV&$u=rMy<)pTm=@82GYw`Gs`f z?FUDIUNObSKSNaQCDD{OMFo{iFc4g9o(KH>^nM>zA@Uq;BiNCF+PFSxKPi8`{9%YJ zd@qGL&o^V#HP$d(+J4DKu-A;xtqfIk1a>TkS2XDy_tnQ)q0p=6kT|i_JTMFmtWu%N z@_6x-!F-RWusT=2aekcAdb5S$zKEbTJfF?dQMDk3`_&Zfhiz0 z_8L0}9&~5)5jnsL1a^dBL)u4ds?{rs!b=%;!kk5D-T=_+g+Bn7ePyTSJKx5C7*GnL zzMj(8Yxf6TJrZ5SIKmtRW7Kz((BD-;(y6k4yq?nxxyv~x0}%^w7%H`8l}AQ^j_(}b zQ0s7sJqiDc7^Y|HC;@^x{(;E^{BybglhqV*83%p=idwOC&xc8hT^}czYHP2coJ1Jf z(i~o7Omenb#GAxuH(hmA0L*YZCn0!fgVNjn4x3v4SXeZ(6yXRyAXfEk)2nPBje%7i zS}z#5h%g(d=DdWAfz-we%!+ix9*`wn!;F}qpT36{NEHn9ks82nJaE&*l&&1H zYbR~6NM?7pFXtA$htA`T3otTl$dCL@jXVi6u$(hH(Z@*w5@@-T;8Pr7yI$1kTg{E4 zzO=w$qKcoBuRt{A;>t#4H%gy>i1GynzFplbKEW z(-lgkOza08wvkiAnp}6U+{h_!3+oXw9 zt5AxmW9*$0Xb4m2@`MNNTQK{Ya7mz!{?^=%#Eib*F0@rmvNgz`{g_01?{$`Rc$Q*< zcu21j6We*pKNnmFlk>w?a+TWQ5E*x-dwXtrTy+Kx?48@C6na8=Jvnm9oB-O__yFY} zK)E3{MEX=jv3gLbZ+JyqeI0VE!H_s%cZ8+)C;()7QpfHmkyr|!^ zMfUxYeWDC%4|(T&N|)y@0R<8J;Z?U-5c&wiP0C_w;Rq*a+x-ELrMdH=}^9`SrhoeK6dMxm*T6?+W@`4R-&!`;>w|N{4w}M zyJ5aE-It2R7%mnLzbDK%Xr9?N-&qzAxBXO-oO-R(Ie=;Uc(}sWr7kRbkI&bi5}^zf3iW$HeS5`xix!J9utTq 
z&uyV3<5i5qHDu9jfu_kvQF_I!-8(_+*mm1QjcIwm(`4WvFaIND#bD!PzAs356JqT> z(Ko@q?a&UZRSPSOvXd8=LSPJOiQlQlPv{-1T6N*nD5|xoH*ul26Fx$LtI;4L9?=OF z^2Lb8Mhm^&Stq>G2>Ic2>RPV6VnMbfdLsXrhE`zMRpW zkNKA2ggKpY?2d{t%)65d^MsHrOcBcoZ-2SlUZ<9lAK9yoR+kO6M5f^@vLGCCV^7}x zRVxD~>9VtC5q7IMJsK`S(g$QTG6HgaNR1A$5tBa+^kNf!H_%Y99Mgs0bC6}CA`HI0 zGZVszs9_7y`3{NX5!;P(E(@I{;+=c1$W{H`VDc7}>(b-SuHzKlrxwTLF<*b03H(Df z>7<3RO*lijYZZwbBwRDiYsoKZIAW9dFa}KNoLSkCV){De<2gB}Y9D3*IqcOYcIBAPZP$G3zSILj4>&zfD_XIEHZfP^5CEjM4SK6beHDTX2 z74?aSiC1A}t>qFK8}j~8Z4GZTSM^(GZ{@-cvT*tLoXXGW@&%_D^7`>ZeWv{>?w)>+ zJpEV1Uh0*Czcz=Bo(jYP{HS^S$B1)zzG>#7=ou*juy>t*8`#Q0BmlkqnM%*;A34@_ z)mtuSRd_m#x@Yd7L`VhGCLVfKrnD-pSjYuOd1>9(qO)yIyR>D$>wvso-BUR;zNNQ% z2YsPagui1|_34<-ERpdnR}VN!%l2` z0C4_mRWgGqGPmN$Sjp0u`HMB;K2h_{M4qZ4&0FMPl4+{V$0coUu;=tK!z5))3MPEY zGX>LH0oqiGLR+%uc#o=KV!jba^(#d}{RNGRx?9Ltw|VgH8;w#5`)3B5ra5%8>f{QILYmIz}Lg}5oyCA9N8AK63h&(*0Uuic9wQwXaAq8TC6NUBU zFzYUna3aL120oB!X%G{x(7w}8uV`37(bk{`1+4jSLY}kWT5k-dB6=x)xlCv)LO&lW zWq8)%D0LQihyXXpWdc%qe|cFj4{#(2!(4xA=aH4fn41AK7D$|14og%w$~QgZKG{}1 z-VeDuCJ*${;sx=#V1FR%Pt-uFsP$H!oY6@CFom*R7MQ0|6yDeVgGL*Fbo(|5+El=+BU&W)iPBXKrVau)E3%N^=U z{xSNzU1mpo#VhV6e-2Si2oKWxQ>g+JWj_D56igkj`1QdU^+e*}k*v+K*Pofy8bIPc zfgryEbhC3R5sclS_;X(o0E?i{V=M5(&x^kyD6=0FURA*xz{GjVJ^Em_5GCSfz5JFz zn*5BtfRq+&rS<#=;1lnK56E5y7Ugik0B0o9rW?ZvD+R&QT6=Q$ah!5Aa&mKsw7|AG zR_2cmScRK-&wM|)zLc(jvn)gWMptjPF=-7C)QX|P7v*q&MF3y~UH**I71zwQ5 z`>q0cwrlsGqi_%f+1EyPTC6W9xEC+*18qp@-!?KI{mI-|EUTxA;d#5U`;cg&=(r`B zBb3T$X^*ZOLpAL1{2>@?lWU?T>{>E!4UVNIIr+;dQZ+!cAt-}a+7QE|oK@@hbWwLF zoSa+wV7qb?njpIucU5TV1wP#<(oaHSE=3FzrNqKktsSg3Tp0%U0?5{sm z6dMWUP%ZCh>G`%EQCMDwDr(Pj5-A;C5!KWu2^|@%H+Amvvu@8PC=^0Zwo7yG6ty$S zJ`W-EFwuZoOC;$FCb^7HQOq;C&Q7Xym=L^fF*xv7IF`++rrR6#!2xCW&ciP<#u$BQ zoXX>Ec+#@F%X-+`pqE*LhtsEd+ISW0UE@fwZ8_HVKSjRlvwk`gN~dt>v{x?DH9-O3 zc8IkM--gtS9~)Ft)k`8op7oBuaS5GcOk^Ym_QTNadq@dDwFm9s%jq5Lby9GnHo8Zn z`d9)eycC}Y5OH}vPW-m(kGk~lZDf+z&s2ClmQf9Zz0m9JWII?B#cm^Xj2(yzCmilJ z=l%#iB0d+WrviU;cqJ6?eDBJ zF=Bzc(qb^XfTFJR{35>&?toy1yAxCH-WYz#!~T}Ae{SI|tzt6h93I>`bA>A;%R=!j zS7doX-7`%!bU-*|g>S|vv}R!ZsfWS%Zq$T-eHNY&Iw4T6!O;?&4a(EyR~fgrP~5~sV~dh$AnUyy%>Eh>E`wo}t-A@ln>rhOQofyche8O4y8am~qdKGP=o9n(v#%OkGQbB`{X;G%V3lP}B@lB^lc%^8XasV zgf*2+$wWae*0j@CrBVvSlk;jht5a+SaRB884l|^|npp?o%Eg(J5(;@JzBRwQp6bC! 
zIpO(WKC71wskNo?)ydO(1TlOsl5XDMI4pN^R|*hO5u?kc*VD9^BBNgfe)K1QZq#es zE0$ovOBEP&&uJkufLb}#79Fhud2dB>e_1Pzk6S_xQdW zxr=73NdC@PIrd(gQ>6oG?W&={re;or?P>L%94|VspW=(flReF7D$0=g{I*m5T7o8Tde$yuvQ#xzm<3vYG#6r_QS` z*2s+te9#OrJRhO%%9HyCqCx+iT;8JujPtf8C+=;iijmi2yma2YJSP@mkkbh_+l|Kf zGoxqwlL5q;(vbNATXWs77eK(HvV_r?-rl5J_UqBcp&X=3>r34;0HtreT2h4 zh@!{j(o6#mwNR?jULX)1W&zZ#20=>ZIPx;I7wy|qi#zNE9hGLMfq}p#l^2SL#%;;m z$^<2cRm)(};!^S*ohy0;#S&?X_IIu=A|5cxF>Y2d=-qjC5kl$mh{u+OS z1vvgUy2m%JKq|l7{Gm7%^=33a!8)K19|9*Qio`vwh+PWqM6as))5;JON}(9C8a2~i zxbh~qcKrpfoT}c@mp;+OA>Voif7J{yIVWWm^y8uTQ=Jd)D$ZW;v^~=3mpn0LM;P7F ztd&)P^eZ@pEa|cU6Rt^TC;C7t(hNCJ+l^N6${tO0=vRBVj`%;Y2#%@p$e#|NmD?F` zNRMwr{CX(J3+bt>{lSxIb8wSJDLAL;eU<)krRzT7vUFz#Y)8z4W*j*uj-8jHc3}0x zX18LhMKn;=qODZ9#*L&Qs^gnDJ>$8*(7DRFeeydS+@M@O*CQiWox3f}|&al|N;Mel{7hcv0 zz7}hg#vutIZk4yh8B~WpJCh2ao4ttS(01oz#tJVm1@*B>#!5K2mbnzU+`tj+Lf$u3 zns&7fyDQC@)ni-Qne%{ap!8wMx?Zr=-uG0X>(KxvqH|MZ{3lt4pD$A~U8v)y@=elN zf30Ysp!R^zAn#8rmA=6oRLHC;>wkFQUGpmwhH+EriD-N)>NLZW;SI-V5%7Nj8Q7=o z>9c%#foI={%6?6JH@g3@BdO^ta?@(^9QZar04*oz3CbHUQJLEA+?|zQRQ(Xq_XRr+ z)I1Ef6<8EkYm9`b_Dnx$ay2jTi*a;SUuWyT4_`^eok4r0PJLzBSt{e%(D6{rlY|rq zawK{d9PjEfRtq)fFl$nOmGjqEc=>+-K|sF0^X=9wWkS|yfZrkR6x*TX_)ykZY*2CA z5f-ZjFEeWio~Jln_9xjGAt!ZVqJwv-0AhjZ%7(E{%g0pad`cBMLm685 zFKs`5)1ZV96;bQrE=?W-5)kzt2ge^AexdO{C=A5?k%?XZmS<(fUYgBFfa?^TWl|Q+mO?% zjBitFg!r5u=l2?UnDX)Abj0LpnF&lV@cbd+j8(_Q=~dk>cu94(SNR;pom%zMxh?Wf zXoyYn?~)URmwn&H>tc$8G;7lD+G2@9m$3qMfgBbYNgx8W}7^&TlL1Jnl9@MFJ13|qf|-dd-(fN z9da1P?e3a|Ti|!K*DrvJezrn~NECNbbp(h+mG3(^BJ(uyb5HK$VWM_CR$Bg&{Jm4$+$*nZRpdJAANbb7Y?U|KjBhub zmR=m(!jBPtu?e#=8KXioR(tDWsv#Ra483aCFnmR;yhojk_9TE)C= z%RrudbsHRz+%H74hNKo;YVapi_kQ6;b1grj@_`##DKeIY@qDu+v?Vj4qx zZR=;z+RZI}GfVDTPZ2KQR~g|dVQd}|8U?|&^sS`G*p}@;cE@h5=l^2!jm^wv-%KJj z%509G_hoeB)62`N5X8OL{?6=A|6|c+^wr2%_7y^N{|`XR!i3G3sQ{w3SIk+;P!P^; zBHB((MB62b*z`Hd*n}cS5r%v?{sFT;K#4=D9vmtNv50Amfub>yQBDBHZFpl>%0KQ> z0P#Lh`ZK6X@DAmd(%H=_v9k8RXnQeW7N zsr3NNylAXSmZ}*ZqI_YIT!VA3p)4r=%PdQu0$A06E|*VWkZsDo4yfI*a@Cg^COxFI zhwvzSR2@pgdjW~f8;;Cdm zc3}4TY%nhiP?tc>Q)%`RFkq*~J2XE(o>X#&R@MOF>Sdl`c=(`d&xGyD&S(8Gl!Tvb zq*|m&2DoM!bm9J*U%WsyC9hEJeou6wT9%!m(@6sYQ6@4kyp-rsA$*VwqaSp+eXFXm_%52gCMc&?c3Qx}@b0 zKqOjS^`Sz`f&7FDkxYu6*Yn?7;+67g@cP$KA<{q&%kO1->QoQp=Qy#RU#ELHj*2J- zl3b@lappTY6w9F%N#Teq33c6_7?bs~ZKq()hEwBSdEt_VPH48Hw%9rXm ziU77tlbcEN98Al=h+vCfR%FQGjJ?S0eswSp_S7qHIb^dbc1eNsJ#|QlpS?`U?44l( zg1J`ZY8Q$Vv~yP(Q`id#`It%-+BFzls%wH*c|E|ptWN)%jdPB)yl4DJKm$X>E*4n_tY_)bwDn{d_V_`_FafN!5MU><$jo$j1A@v*3vEDOpPF!Jx8}%F480 znIa}~KjSdqcTqa19zSP##m(?9zC=?;NfXr4Q2nSE`rNCSLphG2;=+e4CJ(UffjIfR zmET?Qv;A$wdBCy<)PXo1E4+j`^5;4nvWYM&@jcD-d`{_-?{=b(s3nGGp$ed~sJ>oY z#GRM9pTgA3-(XCWyEQMk-y$DSkuB8FVta6(NDla;Dj6ftnHRpyN{VzwTFx-7o=^hy zAGV*Ih#9t5SqLueZpt$OQ=Gq->jU0*%Ecu49hEYsjF)V&#dMI&`wsCC&B{pE#rHb> zY_Y-QqbeZmpLTb$LN5cun81WgnC<6vk-_)#qu}{On_Vxyv?}k=px{Ym;de4trO*sN zpOgJR4O|JZ%gDS3*HBFtt;F!MgEc?=H7(^igp#+^CZLx~w>A0;lvr{;_kwhm1fmJUtT3UG;t=FcvIgFelt#mv~g*;@6Uvg*--#S61g%;aOs8!Fd9#|7EGH4aZMa8*RucyDa2&sQh;qKQB z>hyGCH$|2w8wy{^DKl3*1O;-NCRc!VKGLo7cRFDDWB}1!7U`I!y3ku?gEPLco9s@} zXcSekPlfWd{XyeZ!+wdOF_^7WW_GDw=6j4mw4hfnWb!~*m|>cULojOMs$uc~bmGwf z+@dkn5vneeDwD^8g^Gp-W3o(d<{7+^&dYF7TydA>hZn(5jdr6IXztyXIK-lrjB=J| z^%i-TSnA2Yv@tj!wp-KqUtPQ(<3l581!b_)Xb;NSFQz++e9gd91=AeyKd#(jLu0<- zK*p;{mzUgO$t|N)=LwU-;rnz7!8@ZB5vxGM+hdjZ^wHFFfM{(3f-218ymgFi)~&3>3XDx!3{Gv zPzbFE8}qGZ>T+tCiCkJ@*DTscV=aZk%B7OPwJ+8-la6cNSyggc-|Qt*%Uow!!GF`N zCF6hO#fPj~I^!Z|?IB>ev%QYK)yvD6+pbF|9sb3d^%Q0EI>xMCI-~sCf2J;@uWqZ6 zd?j`H_iE7izfzY)NR{?f**_HEzs806R)!i&xK>u$7ic#IbNr_lFH_3ZeB%;AtsNpZ zZ8Ch`Ww0YlsdUBsR-VU{?NknB=EYEC(A|9z^hyjYyogQ6b2#iC<^#p+mMZ);1_j%b 
z(uJ2cV`<88vON`!g<5tu|sp0tz$z{ z42Nk{KMwJT8h2~WT03ZJz=ip(A_~~lyJ;5%U>tNrpK9Q+@I_W81&gam1Hg?s0hf!e z)%%l#7F3=O4ZY5w)NP!@N8eqS%XOtM&z>u@e9K|s&&BgTOPKk-;%uwdBeZ4P;`MPV zMT=}YD1QhR{WeNpb!rwI{yaNCE32NMm-~tc^zsqdBm>u$^C}`EQ1&J%-*~xRx`|L~ zRy})_^=CPB~*6IhT1^1e4}3R3xfhPKesJ#X%P|zL8F6V@ZbwsGqy+2L!|8|d+Fw2m|VbU0qY&K z-NgrreF1bNq8J-f>>@_%=H`!ivge4g?dN!iWg~I-Z8zF6f zvdBSz5x-j#7VbXEBBHVm)i6(D?j^9C+1pTH_reOsH`^X0(+_DOR`!--U4sq)hmkG0 zMq#`Rfz;z)+G3QTMWT+Uk%`_5CQlMe`|+iQF;v8lssg&%2JQUKhEMqmTO48e$$TnF zpAM4-=?JW-7Q}7get3c zbIEH@m;jPqFQwi2t6|Sm>&X?sTA=s!y6U{jz>DsakR_r8N~1riTD{4Wv{J=SDfSj0 zR)&;tNbnYVp+}<_w1bm=>WCex@=Qvbog|^x)4g>se4?3*r=@-H}hombG$_JWQ8gY^z%Cc~skI$m1p6d6Zxm+2tbl zM&mY3+}J7avG8868BCv}xVLMx=qA?*Z=W*DrYgkdAh=xQ&O}>u?G%+-w-c2wTIF3- z*d}dt9_`&w4^CBi)%L@WVT3y?l%cQM;)j}C-OjkJ^Nl#-%ht-_8eSh0;xnD;A1m$x zDc)rA9T?u(4&MYFG)nW5ueZt3821G#CK+hi%SU507GZ9uuQM^3xuZ4F(JA{oAe57t zLz&I!8Bo~A5T;E~N6+Jo+iX8wM>sc1<5S@WzhH{1s`4o4PSs)BIJF*rV0igQc#v^I zkxvVzV+2RiqC9l|j+l+l$?`={w!}!dw;xmF_&P(Oqef`mct?3Xko1yvXAyjr1wy=E zm46}k?i0hVhj_le7CEa5fj>j#qqTbIcY4tRRo(>TJXXVdx;vTT7SE4YKoexk6{dQ< z_55f}JfO+BHsgOa)n(yeci_VIr)$Rfw zAA```XHyoVI8qbm5BH1eFh#yN+#V>xj||?c$zv-Rv6mUu=pC*JuT?%joKc)qy#VvH zI?N-B@c)#hB=0NYrv^p*oN~`67oK&t3zz3hm4emU`$|vtS3``f$aYXwVwO8!oMn0m z*ervv70)xvUC+IE1oZA(c(&ha+JmZgxzXD=+Z11~$`@&X9MuW;TfuUye1EH8NZy1<%)~nVcm~np+tp3^BXQ6^ua%8nk}} zA5e!vsw@JMnY&~On?^frTtvHN%qnYZ2Qpl)zsNsOhO&YdH^j{Xwv0Bmpv6n%6_3ly z%{K@|X*C@nq?UqHmX&!_NNzLT3TL!jLV7W1j_mI2X0`kF>OdA=Qzk8kBK(n-K?8bd zp+DU4f{GHui`9Y7t$N|L3Ta8&tVMC$m4F{KSd2waDFLaxD00JQs&~~LA&lJ7n=$k!vl0@IvY$I*D8E7 z9#tihxSZj?d($IqYDza#?=CW&Xo;FE|_B_$-Jxu~hvE<__1# z292E{J2{0Y@6kEhzGgOYol>3q8sZAp6RkJB^iXuNdwqZgc#6IBhWUEEq1n-4Q3)vdf`0pBmYN81EgtXTtbuu%jOKoC{$UT~`6^Xd9NX&-(q$Q2Lo;Vaa-oCOdhW}_3Tr;5iIYGFeNi6HQ(Ftt75+E2Up@= zdmGNBY_O4iZiow7{rClt+F2_4#l6ZD|6|G)$)MHE35_gGEU}j%KQ$EDj;_q8=(SDv z(N~yyFIO-b4Ghoi4yad^+t_yXu68fc40#3ZJ8^}{U&nYHsdZz9$SDAYVUA6|#c!ot zNCi4$Y}>`pgB(9?(G|W^C#iXhcc+7{`~p*hqlKkG55AWJIT8#JEbnWa!TRf}EYg@=eRB=S`UFl_gww!~O@GV9y z=e2p+RLFgW$*j#r!}*r@+A!&ICT}+AfbF!(al_?{7IV&0?ZvEY@i6t|TNwA}yHu66 zpp6^i7oBoD4RUm`WtI(Ia!l0^J}?zX2VMAh%?tLI^6(BOmn*=qd@IOAn!DYfB+XLPCSxhMfa%Q*HJWP1`n#JVBUG{_^jPJvgpQFet zTA4G2Icvd=zvvXNSEz!o7Ql={gKewfpMG!=9I{_}sqm3MV5r}FYbkg z;U_CyCeNTXDh-w1k)nsE*fwU&o(8|l^yB??=6q_1XH<7jL-;j$0p8{}90!Cr;IfK5 zz0TwT@HYXaqD{(MJVw*pst4ze{#rt=z$U`tA;;E(=oMrjZR7}l#d#O}|T9_ff4+Ya`BaA<) zDkhDTD%k&Ehm^u0=;uqBEV>vE+X#}|bzyTq{jR+;?1#Bfsi%xGKAZ)5T@(-}y!^SZp?eoY>M16H?LLqWR00?A2eJ^pmvp*pI6 z!T%en$vq&C|3Z(X|I)=CTsAP0Q%tA&MBPhimY$9iCbx>yRd<0U4yi$nDm?Fj;j80{ zUXEcsrd^dQVJt@nL7dHZnjtx0R!m{Hv%U89V6U^mWvk17_2FOh%3U~!%MO{1Bb_bu zH-fcQE2dt76K1cN3Xr2;fV@Lit(XMlJ9sr3z33)FKB{9|-uxG?IW``%`oFC448FPt zTJqJ^1lj)ya6z22on|XzW8ySQapWL5y9c(sKg2(V67bx(s4+*e29pLf`_qcKNg2w+ zIZ9uOKhdg#P0BzNFIW0wcsN5uP-O%S8e||!&>Y}czt}{}6fnKWg5g$UL)@#nOp%u{ zXCNx6D}p5m=8(#>2NKGh(S{sxK~-*;j&Lgh@BLA{5qGC#Yl^`zsm;UOq)j(n4ER72 z1S(|C5C1}CEu%GDDx3npc!4^uNUy~)Q`q>3O?y!D@&`2-X-sCiOw_J>FtvU#g zE~)UxY(JYr3Gqn9L1@X}01Q5E(;hAd>jJ$_M?DI{IDtS3K-ZIhY4DdU8L2AtEPt~k zwG0jDe)2&-QU=4usAhim-+O2*-P#r&sLQ=+;o&o?xntM}Co$p2SCw04s1yo5|)M75wgmLi=NtPh|rk zu&X1{>J2WEp*@FYLowx`}s_PQS5fWsrv?L!Dd|7B1Tt1GYM5HZd0}E*-4r>96Ucq zVRD6H_h#3a{GW!LB^aO1>_i<5vU?q|6f*K1LxCls!N1qxw@W|z8(fq8VGV4O87*G; zCUn7Z8o=#pV$Q)Zf=MhK147E2xsDU5Ws^9hLmDj*$*>d>+wjtHuAfk>;B=d|IAG$@ zc8RCqi^-I^_`QuLe+txcI`Omz>S90X2X|shO@U$vs9{Ts{COv<0T5vRnoTg>YgL+c zFlG6fD*uToJ>bql%WnR@R`}_wAm7goyF6ua#fy*CN0LU;s+!1%^eiV^pfy<|`o@81@!lKJZ z+Uf73F^|u&F+T-ii3sl%Uu}pkIThV zSD4*!2dX1`Yrz;B8m2tt3RC87s*{#Wb|7}6d;JJXgkRTEZfP-0I9y&r`KBG2jrm`d 
zZ=o-v+sgGFmz!c6TRz_gvvfMq=I31OHiE*_a$k=+?sOHJwMAZQFnKu)27dF|)dAK+ZC(eclZ897nLzcbPDqil9FI37!o{n8T9NerLa)%x! zFQ_>9*$u5E_u&fvv2Zq1z4vDv7b(T{*m9NGr=8-~4nF|X%Qu+`Bn$I!KHnNtBhV_5 zEGFzCBm+fV{xb5@jfXsc->+W6n}HD_y#j7})eoA+h(4p{6|ucL4FiW!t8 zowH(>gFC%RPeUm5(H|xIy)xNqdZTwXvYfb;#0?#Isg-Jp|wU&55F)6GD>)Mwm z|BkEmq|24D1;~mbn0*;I3dX4i?A0GZ3yw?3(;N2YI zvi;>TRPBd@YGFW;-RdWBv`ABf<=&tS9nsa0ELnbJDC)xDJfE3Z`iwpIh%5}RV!}Q<(u%w=9Zgo zT+C?mMoNb^cMYVCe!7FFRXG}F|5Td$ID#UTr&9c=loXftzJc&a9Zxie_@f@(qI<<< zrk{OJR~(H~P#!JG>1uPc+R)c%hm)16_?Iqc0lkWqw58B*biBSiz(G&UHsHn$gwgkP zFBqZuAvCDGlv%y#Ccds*2t}U;Fr0O)Y>+*UI$qX;(R!U~hBng1B2l9vR!BeVwbAqARLCaU z&ak|op_Y+S*$#S%_mEz;N(BtUKQJk*78V=1hk5Mhbo=j7t{9#xMKOc>Xvd4GWVs7i zvfxc74cb5JOw0MRHc;e8yW$v4KG#aIKij1pqpz2g38PXw$nHR1JY$KE4EZq!!`@EY z&K8mCyD$iA5YVdT&XjZ-0M|vrs<`|mX`w}tK3%i3nS-&`^ zqR1_@Edd&`{oxI6RH~e!tzA`EblDDOh^L!WEYPM`K9}Tpz@FRHsV0Yd%%8Iq`LJ!n z2|c$W+To|20S5xw0*=ZQlMhj4nJO32c7?uHNh=c5sg{0hh}W3=kS%sI=7RLWjr+{sg zzEBa1O+TI?6dtNXPY-CE!NJ;(qV9|~ebYJLN6LTnGX8)KCL|&|cbIs-LM4>tZYSBj zT6eZm#ZxPC+b-br>!x_9O-^E}bG+-URO`uonqUUrOm&tSd`C8rL~ASJ-4=P2&G-l# zcO4(I8L&HhcK|wWLx-#Zz9z645hX*&i_J=DfGqPBhID8XkmU-$oVi~}akM54t~z@d zBy;b2-Ni$0_55yH!bdQoa>;vJNeiP2-1+5k=>BBbH4L|v?6FOnNNkViz zT%mqfUenp#{1H8BQ!E`$ECtKrMloOfzrnIYXty)k32k|q(p=SC(b?oQqOXg*jxlqV zQrZ$oHtAHVSZgxeobq3UYd8T%PNm4(YD#`a>sL`A4P@3lWX_5*DmUjpgJs!Q)16WJ z)lF*S|MJ0rCYZ%RgMwwW2UHoE>Z7v%otp~`dIDMjnm08!qtu(-6K+yG{C_BkuPqa! zRQV7A)H2Y^5N$p&E)6JzO@D%aoK6FJP}B_~xd~R|T98H#UpsjVU;&%ml1tT!%axyS zE#NQ{^?A`^HZD%cqJYe3alqLsg}TX2p@ba>tDrpOBG^hQA74aeaJz&J+CaA!UtI1^ z#+*?U73oQr;uTV|HLGZciur1@-lbeyN^2(iqWt1=!`vLkmXt7Q>av8tpv4SJi7J%g z(GrE9q7FojNsKYw};TC(>)Q59hD-{xlp? z{j5Tf=n64LPfS5JS0BovDd?mlKqGPT9lflqPyR_;-onqb>M^dBO)=qx5=U?t1zZ+0 zU$Y0|d@3I0*Sb8@V(2Bu{f%A=-hdr?gR1$@q(wQu!S(J3Hm);?2b~suAPJzvpc;nK zMxa9B(HtMC(Yi?4f}4ivIO3~8%gaY<%{7PUHMA@>yzCOt7i|nS*G9u>s+bGc=T)*- zuTwCIn##&93P}0kWZHP{KHQ7qh#yhS*hu`ApVylHW?+`LHScCJv<7!{XRV4CWr$#DNyUUVRvwbB^NQJS=Tq2 zdGQ&l{0MxL2jz0h$xO8Ben_<^VQ2wYtCyMlw-~r*Xr*nN&gUrg#3r2~+WSid@mQPV6}U_~iuM~JvzuOSdJzn1 z2xn1h25yb9raF3gJyh5Onp02P9S`#kzAW8(yJnv2uPxo-+os&sF6TIJHhGZ&>$xZR z@BrNz$ilTMya9QoU^Q?U2Plj#H~YOpV1|t*EI6kkZm-HUDpa?@ImWx#-SEIRe$hco zY?ha)(c?6+MwMxWQP{ZI-~}Lu_&CD6GL(9pMj1gVj{{*A4)@ca!^E7amYm^mFFzBY zum%eKP{$rj8dsX#0c^lq@Qz=s*g(mou@4tNY4O8{z+jKk_63a^`U&(wcH$?(2~{+0 zOU{q%Z$qb!v8{1Q<0q!;rxa(zydV_F@nFfv6qkoU)1%~%+M?#jmn_CVHy~qGSdU@4 zJjqtLX`mZ%%CWS1sKrm$;^|JN4?CbCyTFnY@Va@k)7G2fSWSK#1Pf(ICZ;$>6MGx- zOqI#M(Z*o!nBqlC{<1@L1g8)}ajZrrC+>C>-mc?}p@b5ZU&3{`!Jw2Kb>hFIkWzL+ z{)6{S!q=pcykTTBvPrGFB1h2n7QyMN7@^5$X|MSE%I&FeKxE0~2BQU^&S~l(Z5QMPuMP8y zO_;#1wpvi>hFH=mpX@?UzeN_48ou2j52Np9wd(d6YP0G0RmpG)#-}{xXCBlIbDnes ztoX}dUuda#0fj%mNg=0$ov}gVa&T3b1M*H(ns@0d6%Re`$E_WT{E|avliPGL$CJTNU>soD+&5bBTjG`ccw3SkoBU{bVTJLty6A+= zHoV|O+t1HZ$&W|xpWEXlUm7y)Vtj3z{U#esD5!b@4x!lfIbCFvX0>?v*G%~!)hDc{ zCAz?q-F;G}pzcnL5E>|Z;HQ1GrnJwttER`t*z$Y}F2S7+)F6(zLl<}U`cYAV7Wf|% zx3)YylH3HB;C9`npI)?=As?V`H+9iPIj71!bcZ~;-SU&g!>WF;e+QXbyG|nTks^(4 z7*!u?eB}DK~NqHEPpq-THV@U?0^K$FWQB-&36 z1ELqJd~~~|3dEwQPakD9DIb#nQz}zfFOI4K->8&1j4!1fH00#w(jkoWK$uKsW0T|~rMI!P zbe~~U%KYZIgwkP9IQBr8O<`kWfSlY+akAu6W)9HPdc`JMaEfE&^2-9S;@!!pazF2J z3I1*l#PEN~6Qo$OoUejmKyV|1#srZbCd6=j8{GpePhco9q$Y=*1_pexg;C!ZN*%z_HE{P->% z_P24oCLpNPkesDj10n5$5*KUBO_lxdSWsOOzRtSS1#0Q<&88J;w0`n+m>o)5WVh39 z{q%`53MP{A9~Z@^)93!)HZdN4ahBEmf>jSjo%Z7Ji+f2wP&~Rg0v_Hplj^Q8Pfk;v zr`58J0-70Bp5Z@8uNi~TZWBz3Qly6ki)2w_gZA!l0T6K(_DDRt_@ks1Zm zS_p+9JFdYgC+No*S3avOxk@NBhH;h@ML*WI19ga+nw1K1cgSl$T~(YTXxYSI5ZqqnlTCShwLX{z z@F5akqk19OIDV}HH>0wgDq$4v6dTNc!ui3cEC`BEXyXbv+&alb}Lw;0zcB_u&QPmRHfJzWNG0r@rFs5 
zP5YhL?gO@fIyk7xosgV}XefkfwN+q>AyeE+w+Z zSHwB!_r(;mUR=uO^Qw4H`0*I}zSLNoT#M{-CcZG_E&|W1;wssN6)$xRQjh&XQ&~TS7ZvvGiFI#y0fjsDT#93+?2dSmo zQrTip_-*cG-A);lb+#pnabL2!vw1dUPbtAc;Snzp+|Pc=X?}AGm;8gv?Oh1fv2ZW* z6q9f=CdL<-m)lc~i91RYe;)2fn1y_Nxt3=X z0Bvo$JX&LXn>ti~y_nwZVIKHtNP)U8vo{^rC1!W}!NLk{)@8ebdqHN&zd|}S!>Km9 zDi7BwZ0@X5;pV#|;2rJ=pX72=;iKyUEz?Z#{SNs-3*&P-DUC~+h+%$CMGxWM_|`}& ze5Ct^Gdaz)>k(+~S5n@mk>PXd(f4&+S5_p0OyjRRGI99``4L27v`sZu(J^-ATg_1B zD%I)5+1!-IGV|Kg*}wUhgbO=LmF_= zDjgVis4Fqbmh*wk*>82Ly2AHjOsF$d#&_)`S2iA@b!Q-RIK}dYZRm^N^yH_iqSvPHb;u?3WrZG1967371IEuA#(OlmX&Z%tvp08OEGIFm zx@jBnfiLILcV%~lQ<+@DOZRCBXU>j-c3@9%iK`9y2ioUuR#^*u4HvHC13#Iw&@3yV zcjc8OzMKisX_RGSX_C%&x31*8f?{7wr=Im?OlvTSzh;+}UWd$~lKr@E&hlL@ayGAD zy*1a^q%r)BPMAZ1w2m>)ET6IPi!Fz&UOM%n+7`mFi%NXMRU)b^1C1S09vQte=ac{7 zujs3v28e?H&0ixD`Z9p^U+60hV_L^q?ubfun0{L06EDlp>1r-I9Lx%PJcm6S4^W-h3RYsii1kh zJdnsxk`OlGcxxb{inL}-jG|W1JN7P7z@{yRrTdt|Zt!fEQeE~^iqH};HN!0f^jXca zW<8orcgP^!>)1d2F!GhWA%qz;GvmN0&JW#3(%os1ETqrz5=qqQV z(wodzIV|&J82qe*>a;;To8SR%`gerk=<-bb?hgte&b12b%ZtmIvq`A~dB*1GNcg_c z@oUPOrRZY?4xii~G=Nj>-tLH9O zu1o8o6f!|pJ#3@M9X!P0w6`WySx-vK!*0|404kWTQOVUzhdHz!tJ!LynVyJL_7A(*&I9^h2qcQn% zg`RLn_}#k*{9wGiXFV9E(pT~1Q|^K2ZU0v?nRsgIT3cyfhb08Atqj`yZ6biu+8wfpfC6xs`$ zo20EbVU+y%HmY^fX^(tL=4(cCe}UI4RJ;6%L2D-;z{sp}@d7bsLUg|6r+=ZC!ARO- zCVsQEvc;exQ9h-WNGcibl-VT8A5%Kfs>8FR8*Sl4J}@NCD{ESed{8jKAYbr(F;VEF!zu7Ru_A8Q%cT*~7K7r!Kj-`kNQ1+{QKLw;RUfs*r6i$HKclrKLh^k$ zc_#Li!yx9A5I({7i*0bx1uEsFZuY!*|I!D*IS*=-$J*Hx2SBQKhLG)0aR#ymyo~hj z4)8Fe5_HV}8#~1X6&QC#-dlC5`+D6`pDoL>-9KxjhUocj?DPxz>PXr}r4-R>Riv2P zbd9|^Jz32WpBVCMz{}rAV9jyuUi_d&kw;po;<%q0RI|4k(sYo{7ekNYaUZqi-kqv* z2W&J|8(wDeepikFX-ybu_hu7DiWe*L zGmKt*l1notda*)$zZ+b_a0Pw7_)Z5(R_@lxj<;%rD$*{{gN~_wQIIQYt zaDwH}TAX?Y^Y)IAy`vew!y&XhTI&vm)nr~FHpTg)HGU<0mNqEUo?c7=TwgRsLD$m0 zD;ruUCz@@8b8h5vJ9}$VCf}rVr+XQ4ECBKlhXVA`FQs_flovoTu2re#q=2hNx^2-& z1|@=y-=!$nRAt-l^dY;dd!SOk@EUFP*bI~L*BC8~ z>@T-d3*c*yqt7Fmg8}V6J4+cypCMOMe8;)(C;Wxa7hOdF+(2CI-)snH{Ic?fdR(Qpj zS?T4hYSVItVk#4{Dm$R6sq}_^m*qL-V#n+pDgvgGf=$MVpfT4pFE{0NG=;_xdz4G_ z&QWETPVw?`VauD0zXi4VmdTt*b>3uySw39%iW#P#ZAZZ>+pvbH$vH%)g^luf&6&PStVBefE#GOmi7(9e|8sdF~BBt#df$< z9t7x#fIMJY*qEG6-}3~g^k>OiFg3JhG>hSJ=Yi*V@$s7cyTNFI^LCm$(ekL~kj5va z%q~?neuZYlz4&a4ATd5v~|a4QoR#%u2t+y z*+eu!yMk;Vq78J1=_UJ;zYVY}1OA^IWeWHof@f9C>%27x1g4%42W;*yQp+2kZD|K!B`WC`{%#7ClB8|z2!nt1V)O8h!tlsYkJ zZRLA`!}ZqY@(xWfJH7FjO9B?GXN>81bRWu~37R_}2r7*xZfQ)Ls5iB7cT zRc0xd<((;rNZ!m_lo9It$Fz&7%)M3OpMFA#WuOj? 
z$({y(lgWM?LtT0i^%&4R-IOueXDEVR{N|VlX_@pm*Mec_jejPnDJBsA6nBPiz|T@; zi&V=+gHg6GJ+-9JVoYV37~cN4nCF&XI9p+$JL}PfmC7NGq&!;uElvTH4zR}}CMg5| zHq}Yr%ssreT-(NYca=~toz>UiieERKWrj2XT9_h~b3ffToOqXg}RV3vT)DFdy!iA|D%W z^%nTO^DsX>x?L%=&o}~e3=WW_!r7O*><6?7MV#NN3*Z{LYqY@7-4AcZ;0Br?i;1nc z%7zv%c*}%+XZ)&er%k68vC}a5991!BhDpD>^F_@Sb*Hz{^5^7DlR|_cQS5VF=C}e4 zZt1XQYb|R+kt8!)XuY>gx|qbv*OPB_^ZPYFB>#~1>zKl*o&t|6ztN&fi_^X+RA8}( zA<11V%K3~#x^s9wBQd!Q4sor%*CM{~%1VwMTG2(R!Mj0taya_%$D-LFMrnRViyMWc zaQCCG2me<7wp`w=Tf3w&n`(N*E2C`1dB(Dr01=A3wL5v8Rqq?lKTw=opsb2TCa-kJ z<(%=Xl_tUcws=~V54%h*gFl!Tw>nt9a^$1B%<9>-s<*asu|rT<1ds@(rCG?qtS-GDaw>i%Zw|CyD{X^fpya3w(3 zt|!UFnm7}4;vL(b*x0dc+qP}n#>BR58#~sV`7ch@`On2y-%VfiRadRGx@vX5>wSz1 zm{QpCig8LU#`KS5vAO9AV;AODGE%|vB8-*66-NY?!Qp%`nueJGg2qVXL$}OG<=FAN zkZz3D%*Lf5Fg0Zw;D40l8lrY1U492T;HRDpy5vQE#TA&_|G`rOU=b}fMmyQix7E-i zj6cdXMXLG|(9>ClTm0-Q6evb0Qmax3Wx{^8XZ+G%-YZ!gpZVuu3f@S`!X>|ps?%aP z<{+&j#GM(v_(lpBBFZAlo_X}DC6_DFX?&hEhSvXJM+-+re}R%B7WK^Oq5Q+%5@!m;#` zI+a=0Mxa?ay>q0Se^9V-R{Dy>48!pJd2~8NT_Z7Erg|xz*w|4mM$Fx?-N&uhM6#}W zAsd_>v?@yFLP zexJLcvU4zN?*}o;k6AkP$|Pq3S19}3{rG;|+2sJfhjDwxG8N8WJ^ zR9^k(G>*CDA0SYSqIb5m*lBh~yyvCTQfHjWszu@XOREDf&e##_QEU!Z1OTZF z%`8~Xu0wNK`+kK+laN)p8|L#VOvYSEk&pvMR?+WgFL$|NW$yuFOBYIrTlBhDs+DNu zYIm?|9HnxSS?Bx3){8U_R5h ziC#gal{zlMrFct7kH)svC=?xTj}7B<=4Fp{rzkrsM>EFJUmxOax(~bt>x76-%E&9} z`(ZAexG~oEIMwzz%I>16$cUyNDM)Ie|&8+OP)?8~ysfTmv8+>mFJ z$3qhJSDD0hVJB8HjUjG|+J|InWC){AZnS>w_W3R1>{%yH1kX6z9Qlk>_!k@h^Vo*2 zCh?ehwMQRm&Ejs)6djgzbxHH=y#tKFy?2FU?KPEoOIg+!#%8&=Z_9+tW*Hodb+Kj5 zbpc6aG*mK^yzZc-@|~3^!3kTc(%=LFin`D7MqF|`n==>dR>eg3u4LC%^-V;G`Xd9% zA=|^0Y;p(vYJ9li#;A5e=)=59Ae%Oi6yrJ6gQGyX>9hF25J`od;UTO*IN$^vMxYhLW=*iL-VdJ$;=o z_d}lZ>Sn`U^6?Bn&o`5x8=5<>((HnO3rxM97c#p>W?!jj6fY#t+PTM}v5yAVR%SbH z?h%I6)k*Izf=ryLq7*jpt84sI^lgny59`WIfen!Bq+m@qG&T)r?M-t!U&hYBdQLUM ztS+5#2)If5`@QMqFX(ErHfZ8YVd`%!xDO8-=gLtiv!@`7w>_KG8k~6$aAG{7o*2$M zIoGma>wHc?SH8s7O^GLRxwy!xw0L|KkCNDYa!dVPX^L?0lwpIP1S}{JuAwFc;ul)gBA3l66E#btJ3R z+Y^?9e1A+q`j0ph3(()%D`EoM#v;R|YT%1Y&9p~H|4u0fIH2JjE4YWCJSb=|$#qH3 zU$QT)^!fU{*X(b(u>7Q2k`8&$o{Gc7T(Agu*JqJcBfsMNLSNeGf+|TfkCMVo#16)M zwY17J%Zqn;4Wr@;5^v}YSGE&_WNRWpA~~IH(NCPp_rr7+BP~-j{azQNN}jZ{W0pU( zVLaAvQ8)b4!hB-)Z85RDiu;eQr2)p3SuhlRy9zyGt(M+e_ZWDFo{yGlwn&mK@Y~Y8 z>Fiv(MdKuo1ZLURg9^{M>?}@@vlUUknIwwFLp%zFa<+37L2L^Zv0x>_k+Q?fUFLrD ztMS8JIkc720f2G#o~?V5yZ?_O=Xg%YDq=|Q(|n+lo;EZ7vXa*H_;>9Gwe^hsA0zEq z3MyKzY~3HIp;^=8q6EfOzv$RyH&jWPBtkg1&M>Mhe%=#HzNfra<)>B5(HD}Oc)fz5o1jw#gLkWSBDQRfs;)!LkceF~Ac z1?+wnH7rt9t>@+u*E?$ud&%sN@HmpUrPaI#-fpj$9KWmtUVQRskh&DqMZ>;bP163A zbbjDYFwMu_->s0Q6@w?xc<{I*59RaAlypBWnyCLJ(D$|U{ls>=qo8J|M6_>9rB;1% z+)LJuli6PRh2zDsgKDO6{JS4>whRn7hn9hUunVW*bdDHvIrX*uVdbs85=H?ILg7Q~ zWusJr(>P+~vu+67A~4xvem7W;Zd(*2SB2L*9OB%9;3?A^VdIb5{%?(uvQ)BYyqh zvh_B55tWZ}$?!2I-xahOi#w*#Nd|HIr~Rg&N-zu0(Wgcl>2S9Vg zSR=Qp+Bx5#z@3#ut$u*PI;zixXrSg^*_ftMUBCRM^qUlVR$Odvle3K9S?R+Cox!6e zR9w7XR5vXXf7Au{n1ieD5+F6&zig5}T{`@fv3-#WKPZx$z#E=ZD}E|qMfH*0 zpuSue>fbnW5@)}40Apw(#sDw|3gJ$CBWfZkVt)(U54bq2G2hEqk7TyD4TA>Q8ITjE@XilB@pTv=_wPEXAD-rp zuBjr^(@e&Vb%44mpGuvplM^hrx$D_|M1R0LFm$wKFnr4%QRc{$oftF6c`8g@+~=lCyd2Z0d(73BG`YGCtyFT>p@LSM zgMaM5!rjatnL;@}u~9O1<_{H`J+@=~Iw0VXEDWWY#@J#(u^ielKML$j-BTbzT8SH3 zuHM#Lw4;YS`ju;c?>*jh(U_7NeUZQ}d3?DeoxrDtlC2Vl0g*pz<8ddaF&Hv^+;Og^ z$%tStTr3((kx<&g+_w@w+XM0kl4Ae03FS-%V+mj|Gk$)g)2X%k-09$t&iVKY@xRHe ze>8$bXTiRG%Lo52$gCXyiOl*xL|?#VQetivzG6}aPJ%{~=H>sT$v*u5aNmP_DjC@+ zDOT1n_Bv<_Ll<}?Fls|%J@g-6=6$VeQ3utqAO}*+p$Nr;4WVmkIJdj{hUtBFBIOXh?k&ba=^d+vk~>ae><+63M-=f<=#)QJ`6lri<# zpY^C$t~h2>w$yO4caJb>kSV}WLaeOg@+wO#VeL|??>qp@5l0db;!yR@iAd(&j_y~1 
zogQOr!k!e$X}aYKfB=?JAMZ@G(;n$(jU=|y?lP5rIi?SAX6iVm_i;AXIHvb=%7nH{ z>*1BCU9?9T|2(OiVmIwD>mPKw;!2GtF{~sGWYT2PyP*oy!!28s%t*%u7Zqt;@Lz8 zl2JK&^U`!Gy(fdAzD4E)ynT(QA}`vW6a;%*O#7^k-RoC=>^FRfg&?o@Fn!4P|M7?a z4sFjl|H~8Q@Bc-faQ>$}F;481?U(-+GUE?5DB4A4v9A}!C=v~Z@k5j_Dt=dcw*(o%eOOoQpwe$#be6_kmbhWyVjzyWqBg$ z=us_Oxq>P*x_7X3rb%I+)2Vw_;hI=mxcx>;{i*m?gnq;0(}RAU_~vyd%PEQ6-vz6x z2!cO)mbBQZyooRPeRQ+qb2%6mDbJrb>UaA|5b_pScRv;XoqjpD}x;hn--+W0NJ{SBt4+r2F)!0=yt#qX?lm3Gw^)p%@cca z2l4TZei!oN2mfHC8w&j%c~?~G9h^>Ir5nbI9b)Ys>jw;ff9#zhhQ|pqz_~+q<&0}9 z57#NsX7=KNt6R2q0rvr_ThMk^rbB}L?AKO-rcls_OgzbFh!>`Z5E3MFQ zb@}+d9yJ@1ryFp&<3IPS!iP^oG@pkrAn5;H^f3&{><0gWiTB?Wz5ag;X1tu#00Z`L z=h&B8u=Va7J|xg=y*ZP@xWj)pOCDLLXpk&3x#XvW<`XXu!uOs?-QGruo18(pJQwZG z0nFbz_k1(&6Pbq(1Keg>!D(-ci&k95i^zaaST%I=XV9OvzvP$~Jp`Vnk;AT8N^p0Z zz=#ONV_%D$iF=s6Tg8shFs5CoCUVv4V7 z8)(s}J78(6Kl5-sb>>xmp-bz1Th&vRK7*O66;5%X0a>f>qZ0WOMuKvV)U!>%G4cu9 ztPtfJNe$gvDcva<=*ZeM8jw(s+D3(|aR%lyP#3=ieN#;#`bosL5sa_I+|gl-eH>o1 zjKAsFaN@|qaL|BW>PJ{lChOH=PK*MSAX`lWcWt>cp^8IKRp>Ck^O_jDV~agPTFJ%T ziAl(p;w1^yVnF{k3|svK{ZwtZg8+qPD?Xzu$;FpRKg|EXBd12xdhu8I+qXQ2|Dp^H z{!`?xx_z}4p4*>&b#9(d5|N36ZY5xd2Y(2|Fb;Wm{47pl zlB62PyE;F8FI>B?E<{YVVBsj)voIc-@Zj!IyL9or zdCz^n`@DF+Af^=Z@!sawS#jp2?_J@(ScHcume94k&PL?X+>D`Tiqp zh=^f=c|7NuqHX-wf=zy7hyGyUORfBPCL0wy6 z2i2zD{FUD$*YO?$p^!}5@cFnBPSnt*$+kbisrLB!mEE;KV?z$^P4AjVLiPaX@+#tA zzK$K&=p|LH5ROnZ>>$PU7+I6e=tL{*#IkA{x`*_BWAYJ(@g$zva1`|Q&h)cyS|F_j zg)K-^(c;rNPovoOK1-Ery;S=&%RwLMKKG6wuD3luB6yz)x4-7@w}gE^oce&5}M@Sufw zuEG)XP%Z*LLy03m%+X|xwW-zK8OTIqcW=pn@c=S#9lb;gEU@Z`;GQ&7OICIL-62u3 zADWjcHXEJ2Z_XEDXYy{tHtpbE-Y1fnDe&<5D~fJwMfie~h}n(;7rTFsSIY#i-iM$#yDE**7` z3P%YKw5{krLd;D@w*a)e)K%hk&?9s3n(AzF`Gv>mJj)8$lQKCD$@JWdjF3}20n?Kq z3HBT%=EoVF=!@&rT5H-2*Zs5+Zo8CF41?NiAlUUG`dwyD8CG7qjGDTurcckIbxa?u z=2C^5GZFXPUgLV@l*Y$1$#6n6*+do=&p7%@gcu)_B@WDgh$*Gu4IyqUtL}+K6%~qI zj>+13y<_)`Z7giPe7L0_6Zfq*My!7_#`n8_ySlDwD=J+8IEIMbEIifequBGgqLoGy zq4M^Pw|Q@uRZVTgb@;cav`s_fv+0m;AtSJSpz;NOvAx$eJ{5pz8OV6UWN0;^La=n< zd7B|?4Ikrq>VVMat5cNfTv#6ZwvfFKD4ztKp{_!RgWx^#_H=wDya{Q}25jsW(chRBd-WL$f7S|a??!T4YI9=TF4oY*-4A%I0t5 z~W{D*ZFpKTg_ZImyrJ3_l~-_`Ar7oa~H)O*k{*C|zM z0s$Mbjb)8S@5U=Gl7{(^TBevIIw?MIPXusty}$^vM*1$P zf~6f(YoV8D40N}|=uV8eqKKwkyfTWNV1;zPG5AN>lc`GsUDn(` zlqJ>N)|`hJ&JLi!3(S+w&dnvyTMT3*{PPPqOp)6-z~qoS)K^PX*)}AhelS%~Pc{=b zr)j3>6cB{!G!Y1;R1??2r&#WC=HRr_xDra-Ffmt@y076Q9FQO_WNaGy_@p$SN2v`y z$?npGGK_j1;5}k2Hob#9@_`0MQN@ob#_x{74 zrBmJ*js0q_vQX=omq}HRKPOplDb&LePw%$LLfzo=5j+hnm1~2uflIPBC4Z%;$J&dl zu%18=qxgE&??CpAP`Osr9vSIjoGMwjwg43Ccl74q^ zV=>EM2UKg59C1h$+V1EBD;TwO72wuLh2%7GqZf8E#OS}5r#{w zMwiCA4)x0-mAx*2R5oyVjy^^)?rn*^{f2V#XyB44&~l7+GAZO$`od`J^IhgZ7E|S@07Pg8;CY(EZnqgy0r3=W~_W*2D(= zk83THK%S&*=dc7p0X0lmDr)uSeWK}qZBKm}S~Rq}Oo^}sVE_;*YA(6@9`E2xbdwGG z0^@k`)X2DT1baQ)WI&+;KQN%@2q`zzI!pJkI)pO{o^wS)oJSMYICP#eR=_<&jQpqT z{Ll++vW-6(8et;5PZJwCta=Z8nSyDAS8@ zX-XS=4`BPn*MzP{Xh@FM6vFqYu(+ZdN){5b#vicdU->~~Gw(+WO%;vQ;GWOn7Kv~8 zIu1+Cw4vawg>Bd_7B4cdW4#VL<>Hf^z}@)JpLx_S1g}bHlo>v;cX?dS0st zXW7cq_Up%{zYivXNvV1bkbS~5&W4eN5K22f8LwaB(zH+NbTww*vCZ#zVlWfu2<({6 zB$By|FR%pa(;86W{OT;J&CirvP@fqv<(sY316!~uw{(NIBXgulu*Z2|m6dx;s$3bx zGDvX!2}7&=$%)P1>vfNWcSLP!5?WC}xtn8%W0QbCI0#j$!Zy`spJOnk=olwDT4??cQo`{rQyM9&)V2p}u$sm9Kp|VxvM# z5lnLtXor>&O;sqPelDsUEFlqpWN0b$j$0diD!~6voSHV!99iTMKxP+S4EiSM6_&Fk z?CkPA8NCpos0CzaMs2hSZTJ%xu+DVQwuVC$vTH5o>ExqA7m(#we;AZtjV|L(Rv+@tIxpx& zT|qkUWs#@_b4YoXQgaV{Q2b^cJaf3v-9ZT0r(wVBx_j8fQI>J<3LX6G@ z$?bEvDqsCtt!=&0b2P)eI8WM2GCQ#swm5QRiPTKOWPz}XQPwWl4qv`1JL|_NxfWh9 zre3y&?6v}L@o}4h$?-nLlI@c(Rk|rAH-9!#teMOTn4161idNjMT*kFW?T91Pd8QJ3 zOI5N7W?`81#|F@HsIr88YRTC8K|N33tKbuNBQP;FDhd~AGmmzeSar#<1a>YD&rP{T 
zH3I|bs8u`f?kQi2Lqc3m{Tv|12C2g?%2LWXfOFMi2?M2~GJ^g1Yi)(HIHtb;fGja# zAn73Vv>viEjx!u==!=r-^Pe88UGiXzZca+_2=ByBmTal~HqP4Y@E!-_Hu8%lBymm_y2Dnv`e zoTiz=b?7eRI)K9YT2f$ikjO+@&UnM?9W+7hYb60Tr=UifNNDx;Im<_oyiNWIYR8KY zT)%z_vn-;m1ii^>(yBPsSU$?TdTrj=jemNoQMns?ePAuX`aOk0 z5w)6N`#N`6)+W9g2Vh%o?+RvHbu%x@F;3?l?%HmF2+k?Ffp)$iX@-d(){r1r^enPm z^|x_~c95-$C$9AY0zabe9r%NA85QpcStJk5lwgMhKDqTF$CdoHtO@In4kechVF;Dv zm=uFul*sg${%glnV8dtt63#hiO2YlPR#* z9hy%&cQq37@`wz{b4;{Dgk5$(%mIWjSIVBAbi_~|dg>v`nmTftZ_@os4FI|Ms5Jd8bZ3YouHbuEeuAA8aERCgB+3!oA8Eyxu)(j zLA|I9eJgR>a6gC8K<@q3K4c6WY@eWYElqJkl&bvpfx3prZ5Oa=#lP^x4Z)eQT z==#P8q*wmzT2-u0frF>PrR#%oZY!%Z_+}A!IMEp(rc@$)U}3!s5=*2PU-F2FxL{N` zNf!xsW_JO7^4Os^yaK$~^&bJVDz@Rkd-N+CbtVPv83*(rxmh*OJf@SBv&_I!*kSwv z={fCvG|{=EaRiWF{O32dtzci(xM2oCZ~Fm#9QA2)pNu89XZuEPzmum~A)G_-%10tm zf7VhlwEGConsX+VYiuNiNXFMj5o%upZK!+i%oI zI+)nedBQ3a1Swcq`-MWtU5Q?b3LeQ4Ejo)qvW=PYNDVI3$LX$m3!IZVoYJ>68~~Qz z*NkZ_XtU&Xm1(699lb_(w9T|(qX+c@t3M6m71}7yS&~NF|U*7AS47&-5Zw z)Hs*(&qdi(9)tD6viprUK(`%ib;FZkQjT_Q6a^gKL#Uu?z3w+xFo%7t!|4<+AAeTa zSb}PI3Zn+NF8)?5?kq5h3i86OurcDARQhNWScLwh)6W6Y1hDd&o$twGCLbF8OnSSD zdt7MxEH3b-w~u!Zx;S5gWTT>I-mrw9QF^208}p=O5lgFgq3D0p>mxpA)U0hFg^~A1 z$S{dBN5*uHQgr1UVln;_cqa>QFi!KNC|@f4hJhO?TQ~p9)L*@Z4d^8{Kx^KM{($=s z{pB%2;FFEWG$K=Gz9)w!f85Glo8K=a?Hfu3Ut*Wsik103ScNt^(J@6<%(R6`V0=w{ z&5$cxQ~5~wc%AMY#AEt<1qvjSHtravR*J$wf{{+=-6h?f`7=hcb+C?ZL)#miz%U0r z$??!WqxO#kfu5wThyQ|3If7GGQIsQA;bc?p5&KUECuMD}2C+SJI6U$CBbdjShvY`I zcHuIFAb@0Ryid-RA*jm?ECr{%WlH%(&1Bg=mziSM;(zqSndyC^-@IX0f(tesVGtPq zV#7UBixWlS5Co1Zc!zHQAi^s}l3y)fL{iP7coX=3(eL-s!FG4Yjuw!pg}i z>5Jd(#-&OoC#fv9id?4LIa)yyt5g0HRnqf!=Y|ZplCh*5Qf0f9Y})#C+lF@U%rZ#S z@}XryI-=d0Co(sok}8gY=`xGA>;(J51`CvrKWh%m8oswS;s-%4NRJ=j7wKG^{fHGz zrGawV?$@zNMmi8EnqRUl<(;U=7UqTR=M>xn?%@%Iq?OJ)VanJvSK^9G$m3rRNGcI3 zXTv~eyu+|YRWHaR%EK4B7(*H@2)0eV-3ytsm%9nIjefG7*qh{G7Au`owEYdx@IyKv zv$eI-6^NH$7az0qV~|skDxcP73jx1@Jx(#2u`h8@@qEr}Vwr_jN`6PB(hn=jp@PhA zlz$Si;*d%|M&EMNZiACFXpxdvctE1EY&y*=)Qm zdwDJF=V&Rq!A|H_Q6$t`?^Aw=D)mA|B?rBWpebBX|E8r6rB6P^Dc#^bh zJaW`#Is4=>#YtRNDCh3TmDIx&v`dw+<-B?Mqh#iC;Dd#Y!p_Air1_86WKQCG^`zLu zq+tcr*PI({@CMErPTGPgqV*^2JxOb>V6It@Of)R};rCdGXIgY|Vy~4k@ zvyyzOhP14GXBV)XOWZmPbx?+5(q_##qfrhSSr6<1#tT_Qij!t~=rfA$bi}6u@LZ|O z1&ao-gbM;JaM;jf2sg$#i#>m5i6OwAdTMAO=(ki@8lw2;8$P~}$(8PmBkQWee#vEU znbHgFj|8TR3elxhlDVGUGQ9oP`33pk%i~%eISN_`pMdS zgS2bjNi1bmYA=WwD%PSTkUh1D3TN`s0-Nv=?veG(0ru6~N@eVjHu7_Sh;U@(vvW(o zle1svwe<3U^)S!ZUQF)!f;FxosW;DceSC1y%TEw1)vEJw(bg1Jqngk_5{b7A0q3&Rj=)ZO94~Wq*X4Y*$z@ z-W;4K()FQ$kjuCX1i_cMMM1{tkgXTifEuTpJT`_+{Cu4o>|Eou)L>Xl)#dF1K^X~x z!s7B#;4HKw@0>d__gG~}`U%^;uxA-QAidxWvCskhESmu@a7Ao_7j_DJhE?~MiFrn! 
zQCMjOHv=QfS76#?5dlh~8VT#RhIfj)izP!!s;y0Q+Ogg zKHh{_J9Zz4pJom;-x|Y?9g+m>FeTQGX+6)C(tze?V%Xs>MZgX-V(o}beDThyl!0O9 zBJ}uRl_p?^Ik9%2-l>k9uI5i5Ui{|>V4eEb{wBXzz*Q;+{D>|dQCL49aE+d zumgB$G;wyIUTBPLhu<^;b^xz8Ce9Aj-A%}LEWJg*4&YuV;_N^jC_=Vl-(S3TK#E|Z z%SF^KF0ws2jEwm5Iz}6UIDetG#F6bYWyND3Mr)Bc+fZAc$hK`}gW5)>$*AorWRvwd ape7R?Lae~rfq_9B2=@TXaAi(V^8x^j`zm1o literal 0 HcmV?d00001 From 9a70dbb51ab9a5140691c6822ee7002ddd366337 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 17 Dec 2015 12:05:43 -0500 Subject: [PATCH 115/322] Add ability to profile query and collectors Provides a new flag which can be enabled on a per-request basis. When `"profile": true` is set, the search request will execute in a mode that collects detailed timing information for query components. ``` GET /test/test/_search { "profile": true, "query": { "match": { "foo": "bar" } } } ``` Closes #14889 Squashed commit of the following: commit a92db5723d2c61b8449bd163d2f006d12f9889ad Merge: 117dd99 3f87b08 Author: Zachary Tong Date: Thu Dec 17 09:44:10 2015 -0500 Merge remote-tracking branch 'upstream/master' into query_profiler commit 117dd9992e8014b70203c6110925643769d80e62 Merge: 9b29d68 82a64fd Author: Zachary Tong Date: Tue Dec 15 13:27:18 2015 -0500 Merge remote-tracking branch 'upstream/master' into query_profiler Conflicts: core/src/main/java/org/elasticsearch/search/SearchService.java commit 9b29d6823a71140ecd872df25ff9f7478e7fe766 Author: Zachary Tong Date: Mon Dec 14 16:13:23 2015 -0500 [TEST] Profile flag needs to be set, ensure searches go against primary only for consistency commit 4d602d8ad1f8cbc7b475450921fa3bc7d395b34f Merge: 8b48e87 7742c1e Author: Zachary Tong Date: Mon Dec 14 10:56:25 2015 -0500 Merge remote-tracking branch 'upstream/master' into query_profiler commit 8b48e876348b163ab730eeca7fa35142165b05f9 Author: Zachary Tong Date: Mon Dec 14 10:56:01 2015 -0500 Delegate straight to in.matchCost, no need for profiling commit fde3b0587911f0b5f15e779c671d0510cbd568a9 Author: Zachary Tong Date: Mon Dec 14 10:28:23 2015 -0500 Documentation tweaks, renaming build_weight -> create_weight commit 46f5e011ee23fe9bb8a1f11ceb4fa9d19fe48e2e Author: Zachary Tong Date: Mon Dec 14 10:27:52 2015 -0500 Profile TwoPhaseIterator should override matchCost() commit b59f894ddb11b2a7beebba06c4ec5583ff91a7b2 Merge: 9aa1a3a b4e0c87 Author: Zachary Tong Date: Wed Dec 9 14:23:26 2015 -0500 Merge remote-tracking branch 'upstream/master' into query_profiler commit 9aa1a3a25c34c9cd9fffaa6114c25a0ec791307d Author: Zachary Tong Date: Wed Dec 9 13:41:48 2015 -0500 Revert "Move some of the collector wrapping logic into ProfileCollectorBuilder" This reverts commit 02cc31767fb76a7ecd44a302435e93a05fb4220e. commit 57f7c04cea66b3f98ba2bec4879b98e4fba0b3c0 Author: Zachary Tong Date: Wed Dec 9 13:41:31 2015 -0500 Revert "Rearrange if/else to make intent clearer" This reverts commit 59b63c533fcaddcdfe4656e86a6f6c4cb1bc4a00. commit 2874791b9c9cd807113e75e38be465f3785c154e Author: Zachary Tong Date: Wed Dec 9 13:38:13 2015 -0500 Revert "Move state into ProfileCollectorBuilder" This reverts commit 0bb3ee0dd96170b06f07ec9e2435423d686a5ae6. 
commit 0bb3ee0dd96170b06f07ec9e2435423d686a5ae6 Author: Zachary Tong Date: Thu Dec 3 11:21:55 2015 -0500 Move state into ProfileCollectorBuilder commit 59b63c533fcaddcdfe4656e86a6f6c4cb1bc4a00 Author: Zachary Tong Date: Wed Dec 2 17:21:12 2015 -0500 Rearrange if/else to make intent clearer commit 511db0af2f3a86328028b88a6b25fa3dfbab963b Author: Zachary Tong Date: Wed Dec 2 17:12:06 2015 -0500 Rename WEIGHT -> BUILD_WEIGHT commit 02cc31767fb76a7ecd44a302435e93a05fb4220e Author: Zachary Tong Date: Wed Dec 2 17:11:22 2015 -0500 Move some of the collector wrapping logic into ProfileCollectorBuilder commit e69356d3cb4c60fa281dad36d84faa64f5c32bc4 Author: Zachary Tong Date: Mon Nov 30 15:12:35 2015 -0500 Cleanup imports commit c1b4f284f16712be60cd881f7e4a3e8175667d62 Author: Zachary Tong Date: Mon Nov 30 15:11:25 2015 -0500 Review cleanup: Make InternalProfileShardResults writeable commit 9e61c72f7e1787540f511777050a572b7d297636 Author: Zachary Tong Date: Mon Nov 30 15:01:22 2015 -0500 Review cleanup: Merge ProfileShardResult, InternalProfileShardResult. Convert to writeable commit 709184e1554f567c645690250131afe8568a5799 Author: Zachary Tong Date: Mon Nov 30 14:38:08 2015 -0500 Review cleanup: Merge ProfileResult, InternalProfileResult. Convert to writeable commit 7d72690c44f626c34e9c608754bc7843dd7fd8fe Author: Zachary Tong Date: Mon Nov 30 14:01:34 2015 -0500 Review cleanup: use primitive (and default) for profile flag commit 97d557388541bbd3388cdcce7d9718914d88de6d Author: Zachary Tong Date: Mon Nov 30 13:09:12 2015 -0500 Review cleanup: Use Collections.emptyMap() instead of building an empty one explicitly commit 219585b8729a8b0982e653d99eb959efd0bef84e Author: Zachary Tong Date: Mon Nov 30 13:08:12 2015 -0500 Add todo to revisit profiler architecture in the future commit b712edb2160e032ee4b2f2630fadf131a0936886 Author: Zachary Tong Date: Mon Nov 30 13:05:32 2015 -0500 Split collector serialization from timing, use writeable instead of streamable Previously, the collector timing was done in the same class that was serialized, which required leaving the collector null when serializing. Besides being a bit gross, this made it difficult to change the class to Writeable. This splits up the timing (InternalProfileCollector + ProfileCollector) and the serialization of the times (CollectorResult). CollectorResult is writeable, and also acts as the public interface. commit 6ddd77d066262d4400e3d338b11cebe7dd27ca78 Author: Zachary Tong Date: Wed Nov 25 13:15:12 2015 -0500 Remove dead code commit 06033f8a056e2121d157654a65895c82bbe93a51 Author: Zachary Tong Date: Wed Nov 25 12:49:51 2015 -0500 Review cleanup: Delegate to in.getProfilers() Note: Need to investigate how this is used exactly so we can add a test, it isn't touched by a normal inner_hits query... commit a77e13da21b4bad1176ca2b5d5b76034fb12802f Author: Zachary Tong Date: Wed Nov 25 11:59:58 2015 -0500 Review cleanup: collapse to single `if` statement commit e97bb6215a5ebb508b0293ac3acd60d5ae479be1 Author: Zachary Tong Date: Wed Nov 25 11:39:43 2015 -0500 Review cleanup: Return empty map instead of null for profile results Note: we still need to check for nullness in SearchPhaseController, since an empty/no-hits result won't have profiling instantiated (or any other component like aggs or suggest). 
Therefore QuerySearchResult.profileResults() is still @Nullable commit db8e691de2a727389378b459fa76c942572e6015 Author: Zachary Tong Date: Wed Nov 25 10:14:47 2015 -0500 Review cleanup: renaming, formatting fixes, assertions commit 9011775fe80ba22c2fd948ca64df634b4e32772d Author: Zachary Tong Date: Thu Nov 19 20:09:52 2015 -0500 [DOCS] Add missing annotation commit 4b58560b06f08d4b99b149af20916ee839baabd7 Author: Zachary Tong Date: Thu Nov 19 20:07:17 2015 -0500 [DOCS] Update documentation for new format commit f0458c58e5538ed8ec94849d4baf3250aa9ec841 Author: Adrien Grand Date: Tue Nov 17 10:14:09 2015 +0100 Reduce visibility of internal classes. commit d0a7d319098e60b028fa772bf8a99b2df9cf6146 Merge: e158070 1bdf29e Author: Adrien Grand Date: Tue Nov 17 10:09:18 2015 +0100 Merge branch 'master' into query_profiler commit e158070a48cb096551f3bb3ecdcf2b53bbc5e3c5 Author: Adrien Grand Date: Tue Nov 17 10:08:48 2015 +0100 Fix compile error due to bad html in javadocs. commit a566b5d08d659daccb087a9afbe908ec3d96cd6e Author: Zachary Tong Date: Mon Nov 16 17:48:37 2015 -0500 Remove unused collector commit 4060cd72d150cc68573dbde62ca7321c47f75703 Author: Zachary Tong Date: Mon Nov 16 17:48:10 2015 -0500 Comment cleanup commit 43137952bf74728f5f5d5a8d1bfc073e0f9fe4f9 Author: Zachary Tong Date: Mon Nov 16 17:32:06 2015 -0500 Fix negative formatted time commit 5ef3a980266326aff12d4fe380f73455ff28209f Author: Adrien Grand Date: Fri Nov 13 17:10:17 2015 +0100 Fix javadocs. commit 276114d29e4b17a0cc0982cfff51434f712dc59e Author: Adrien Grand Date: Fri Nov 13 16:25:23 2015 +0100 Fix: include rewrite time as well... commit 21d9e17d05487bf4903ae3d2ab6f429bece2ffef Author: Adrien Grand Date: Fri Nov 13 15:10:15 2015 +0100 Remove TODO about profiling explain. commit 105a31e8e570efb879447159c3852871f5cf7db4 Author: Adrien Grand Date: Fri Nov 13 14:59:30 2015 +0100 Fix nocommit now that the global collector is a root collector. commit 2e8fc5cf84adb1bfaba296808c329e5f982c9635 Author: Adrien Grand Date: Fri Nov 13 14:53:38 2015 +0100 Make collector wrapping more explicit/robust (and a bit less magical). commit 5e30b570b0835e1ce79a57933a31b6a2d0d58e2d Author: Adrien Grand Date: Fri Nov 13 12:44:03 2015 +0100 Simplify recording API a bit. commit 9b453afced6adc0a59ca1d67d90c28796b105185 Author: Adrien Grand Date: Fri Nov 13 10:54:25 2015 +0100 Fix serialization-related nocommits. commit ad97b200bb123d4e9255e7c8e02f7e43804057a5 Author: Adrien Grand Date: Fri Nov 13 10:46:30 2015 +0100 Fix DFS. commit a6de06986cd348a831bd45e4f524d2e14d9e03c3 Author: Adrien Grand Date: Thu Nov 12 19:29:16 2015 +0100 Remove forbidden @Test annotation. commit 4991a28e19501109af98026e14756cb25a56f4f4 Author: Adrien Grand Date: Thu Nov 12 19:25:59 2015 +0100 Limit the impact of query profiling on the SearchContext API. Rule is: we can put as much as we want in the search.profile package but should aim at touching as little as possible other areas of the code base. commit 353d8d75a5ce04d9c3908a0a63d4ca6e884c519a Author: Adrien Grand Date: Thu Nov 12 18:05:09 2015 +0100 Remove dead code. commit a3ffafb5ddbb5a2acf43403c946e5ed128f47528 Author: Adrien Grand Date: Thu Nov 12 15:30:35 2015 +0100 Remove call to forbidden String.toLowerCase() API. commit 1fa8c7a00324fa4e32bd24135ebba5ecf07606f1 Author: Adrien Grand Date: Thu Nov 12 15:30:27 2015 +0100 Fix compilation. 
commit 2067f1797e53bef0e1a8c9268956bc5fb8f8ad97 Merge: 22e631f fac472f Author: Adrien Grand Date: Thu Nov 12 15:21:12 2015 +0100 Merge branch 'master' into query_profiler
commit 22e631fe6471fed19236578e97c628d5cda401a9 Author: Zachary Tong Date: Tue Nov 3 18:52:05 2015 -0500 Fix and simplify serialization of shard profile results
commit 461da250809451cd2b47daf647343afbb4b327f2 Author: Zachary Tong Date: Tue Nov 3 18:32:22 2015 -0500 Remove helper methods, simpler without them
commit 5687aa1c93d45416d895c2eecc0e6a6b302139f2 Author: Zachary Tong Date: Tue Nov 3 18:29:32 2015 -0500 [TESTS] Fix tests for new rewrite format
commit ba9e82857fc6d4c7b72ef4d962d2102459365299 Author: Zachary Tong Date: Fri Oct 30 15:28:14 2015 -0400 Rewrites begone! Record all rewrites as a single time metric
commit 5f28d7cdff9ee736651d564f71f713bf45fb1d91 Author: Zachary Tong Date: Thu Oct 29 15:36:06 2015 -0400 Merge duplicate rewrites into one entry By using the Query as the key in a map, we can easily merge rewrites together. This means the preProcess(), assertion and main query rewrites all get merged together. Downside is that rewrites of the same Query (hashcode) but in different places get lumped together. I think the simplicity of the solution is better than the slight loss in output fidelity.
commit 9a601ea46bb21052746157a45dcc6de6bc350e9c Author: Zachary Tong Date: Thu Oct 29 15:28:27 2015 -0400 Allow multiple "searches" per profile (e.g. query + global agg)
commit ee30217328381cd83f9e653d3a4d870c1d2bdfce Author: Zachary Tong Date: Thu Oct 29 11:29:18 2015 -0400 Update comment, add nullable annotation
commit 405c6463a64e118f170959827931e8c6a1661f13 Author: Zachary Tong Date: Thu Oct 29 11:04:30 2015 -0400 remove outdated comment
commit 2819ae8f4cf1bfd5670dbd1c0e06195ae457b58f Author: Adrien Grand Date: Tue Oct 27 19:50:47 2015 +0100 Don't render children in the profiles when there are no children.
commit 7677c2ddefef321bbe74660471603d202a4ab66f Author: Adrien Grand Date: Tue Oct 27 19:50:35 2015 +0100 Set the profiler on the ContextIndexSearcher.
commit 74a4338c35dfed779adc025ec17cfd4d1c9f66f5 Author: Adrien Grand Date: Tue Oct 27 19:50:01 2015 +0100 Fix json rendering.
commit 6674d5bebe187b0b0d8b424797606fdf2617dd27 Author: Adrien Grand Date: Tue Oct 27 19:20:19 2015 +0100 Revert "nocommit - profiling never enabled because setProfile() on ContextIndexSearcher never called" This reverts commit d3dc10949024342055f0d4fb7e16c7a43423bfab.
commit d3dc10949024342055f0d4fb7e16c7a43423bfab Author: Zachary Tong Date: Fri Oct 23 17:20:57 2015 -0400 nocommit - profiling never enabled because setProfile() on ContextIndexSearcher never called Previously, it was enabled by using DefaultSearchContext as a third-party "proxy", but since the refactor to make it unit testable, setProfile() needs to be called explicitly. Unfortunately, this is not possible because SearchService only has access to an IndexSearcher. And it is not castable to a DefaultIndexSearcher.
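The `setProfile()` wiring problem described in that last nocommit is what the later `Profilers`/`setProfiler()` plumbing (visible in the SearchService and ContextIndexSearcher hunks further down) resolves. The timing discipline itself is just a decorator around the wrapped searcher; a toy, self-contained sketch of the pattern, with all names invented and no Elasticsearch types:

```java
// Toy decorator mirroring how the profiling searcher brackets a delegated
// call with start/stop timing; when no profiler is installed, the real code
// delegates directly (omitted here for brevity).
interface Rewriter {
    String rewrite(String query);
}

final class TimingRewriter implements Rewriter {
    private final Rewriter in;  // the wrapped implementation
    private long rewriteNanos;  // accumulated rewrite time

    TimingRewriter(Rewriter in) {
        this.in = in;
    }

    @Override
    public String rewrite(String query) {
        long start = System.nanoTime();
        try {
            return in.rewrite(query);
        } finally {
            rewriteNanos += System.nanoTime() - start;
        }
    }

    long rewriteTimeNanos() {
        return rewriteNanos;
    }
}
```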
commit b9ba9c5d1f93b9c45e97b0a4e35da6f472c9ea53 Author: Zachary Tong Date: Fri Oct 23 16:27:00 2015 -0400 [TESTS] Fix unit tests commit cf5d1e016b2b4a583175e07c16c7152f167695ce Author: Zachary Tong Date: Fri Oct 23 16:22:34 2015 -0400 Increment token after recording a rewrite commit b7d08f64034e498533c4a81bff8727dd8ac2843e Author: Zachary Tong Date: Fri Oct 23 16:14:09 2015 -0400 Fix NPE if a top-level root doesn't have children commit e4d3b514bafe2a3a9db08438c89f0ed68628f2d6 Author: Zachary Tong Date: Fri Oct 23 16:05:47 2015 -0400 Fix NPE when profiling is disabled commit 445384fe48ed62fdd01f7fc9bf3e8361796d9593 Author: Zachary Tong Date: Fri Oct 23 16:05:37 2015 -0400 [TESTS] Fix integration tests commit b478296bb04fece827a169e7522df0a5ea7840a3 Author: Zachary Tong Date: Fri Oct 23 15:43:24 2015 -0400 Move rewrites to their own section, remove reconciliation Big commit because the structural change affected a lot of the wrapping code. Major changes: - Rewrites are now in their own section in the response - Reconciliation is gone...we don't attempt to roll the rewrites into the query tree structure - InternalProfileShardResults (plural) simply holds a Map and helps to serialize / ToXContent - InternalProfileShardResult (singular) is now the holder for all shard-level profiling details. Currently this includes query, collectors and rewrite. In the future it would hold suggest, aggs, etc - When the user requests the profiled results, they get back a Map instead of doing silly helper methods to convert to maps, etc - Shard details are baked into a string instead of serializing the object commit 24819ad094b208d0e94f17ce9c3f7c92f7414124 Author: Zachary Tong Date: Fri Oct 23 10:25:38 2015 -0400 Make Profile results immutable by removing relative_time commit bfaf095f45fed74194ef78160a8e5dcae1850f9e Author: Adrien Grand Date: Fri Oct 23 10:54:59 2015 +0200 Add nocommits. commit e9a128d0d26d5b383b52135ca886f2c987850179 Author: Adrien Grand Date: Fri Oct 23 10:39:37 2015 +0200 Move all profile-related classes to the same package. commit f20b7c7fdf85384ecc37701bb65310fb8c20844f Author: Adrien Grand Date: Fri Oct 23 10:33:14 2015 +0200 Reorganize code a bit to ease unit testing of ProfileCollector. commit 3261306edad6a0c70f59eaee8fe58560f61a75fd Author: Adrien Grand Date: Thu Oct 22 18:07:28 2015 +0200 Remove irrelevant nocommit. commit a6ac868dad12a2e17929878681f66dbd0948d322 Author: Adrien Grand Date: Thu Oct 22 18:06:45 2015 +0200 Make CollectorResult's reason a free-text field to ease bw compat. commit 5d0bf170781a950d08b81871cd1e403e49f3cc12 Author: Adrien Grand Date: Thu Oct 22 16:50:52 2015 +0200 Add unit tests for ProfileWeight/ProfileScorer. commit 2cd88c412c6e62252504ef69a59216adbb574ce4 Author: Adrien Grand Date: Thu Oct 22 15:55:17 2015 +0200 Rename InternalQueryProfiler to Profiler. commit 84f5718fa6779f710da129d9e0e6ff914fd85e36 Author: Adrien Grand Date: Thu Oct 22 15:53:58 2015 +0200 Merge InternalProfileBreakdown into ProfileBreakdown. commit 135168eaeb8999c8117ea25288104b0961ce9b35 Author: Adrien Grand Date: Thu Oct 22 13:56:57 2015 +0200 Make it possible to instantiate a ContextIndexSearcher without SearchContext. commit 5493fb52376b48460c4ce2dedbe00cc5f6620499 Author: Adrien Grand Date: Thu Oct 22 11:53:29 2015 +0200 Move ProfileWeight/Scorer to their own files. commit bf2d917b9dae3b32dfc29c35a7cac4ccb7556cce Author: Adrien Grand Date: Thu Oct 22 11:38:24 2015 +0200 Fix bug that caused phrase queries to fail. 
commit b2bb0c92c343334ec1703a221af24a1b55e36d53 Author: Adrien Grand Date: Thu Oct 22 11:36:17 2015 +0200 Parsing happens on the coordinating node now. commit 416cabb8621acb5cd8dfa77374fd23e428f52fe9 Author: Adrien Grand Date: Thu Oct 22 11:22:17 2015 +0200 Fix compilation (in particular remove guava deps). commit f996508645f842629d403fc2e71c1890c0e2cac9 Merge: 4616a25 bc3b91e Author: Adrien Grand Date: Thu Oct 22 10:44:38 2015 +0200 Merge branch 'master' into query_profiler commit 4616a25afffe9c24c6531028f7fccca4303d2893 Author: Zachary Tong Date: Tue Oct 20 12:11:32 2015 -0400 Make Java Count API compatible with profiling commit cbfba74e16083d719722500ac226efdb5cb2ff55 Author: Zachary Tong Date: Tue Oct 20 12:11:19 2015 -0400 Fix serialization of profile query param, NPE commit e33ffac383b03247046913da78c8a27e457fae78 Author: Zachary Tong Date: Tue Oct 20 11:17:48 2015 -0400 TestSearchContext should return null Profiler instead of exception commit 73a02d69b466dc1a5b8a5f022464d6c99e6c2ac3 Author: Zachary Tong Date: Mon Oct 19 12:07:29 2015 -0400 [DOCS] Update docs to reflect new ID format commit 36248e388c354f954349ecd498db7b66f84ce813 Author: Zachary Tong Date: Mon Oct 19 12:03:03 2015 -0400 Use the full [node][index][shard] string as profile result ID commit 5cfcc4a6a6b0bcd6ebaa7c8a2d0acc32529a80e1 Author: Zachary Tong Date: Thu Oct 15 17:51:40 2015 -0400 Add failing test for phrase matching Stack trace generated: [2015-10-15 17:50:54,438][ERROR][org.elasticsearch.search.profile] shard [[JNj7RX_oSJikcnX72aGBoA][test][2]], reason [RemoteTransportException[[node_s0][local[1]][indices:data/read/search[phase/query]]]; nested: QueryPhaseExecutionException[Query Failed [Failed to execute main query]]; nested: AssertionError[nextPosition() called more than freq() times!]; ], cause [java.lang.AssertionError: nextPosition() called more than freq() times! 
at org.apache.lucene.index.AssertingLeafReader$AssertingPostingsEnum.nextPosition(AssertingLeafReader.java:353)
at org.apache.lucene.search.ExactPhraseScorer.phraseFreq(ExactPhraseScorer.java:132)
at org.apache.lucene.search.ExactPhraseScorer.access$000(ExactPhraseScorer.java:27)
at org.apache.lucene.search.ExactPhraseScorer$1.matches(ExactPhraseScorer.java:69)
at org.elasticsearch.common.lucene.search.ProfileQuery$ProfileScorer$2.matches(ProfileQuery.java:226)
at org.apache.lucene.search.ConjunctionDISI$TwoPhaseConjunctionDISI.matches(ConjunctionDISI.java:175)
at org.apache.lucene.search.ConjunctionDISI$TwoPhase.matches(ConjunctionDISI.java:213)
at org.apache.lucene.search.ConjunctionDISI.doNext(ConjunctionDISI.java:128)
at org.apache.lucene.search.ConjunctionDISI.nextDoc(ConjunctionDISI.java:151)
at org.apache.lucene.search.ConjunctionScorer.nextDoc(ConjunctionScorer.java:62)
at org.elasticsearch.common.lucene.search.ProfileQuery$ProfileScorer$1.nextDoc(ProfileQuery.java:205)
at org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:224)
at org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:169)
at org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39)
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:795)
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:509)
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:347)
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:111)
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:366)
at org.elasticsearch.search.SearchService.executeQueryPhase(SearchService.java:378)
at org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryTransportHandler.messageReceived(SearchServiceTransportAction.java:368)
at org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryTransportHandler.messageReceived(SearchServiceTransportAction.java:365)
at org.elasticsearch.transport.local.LocalTransport$2.doRun(LocalTransport.java:280)
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
commit 889fe6383370fe919aaa9f0af398e3040209e40b Author: Zachary Tong Date: Thu Oct 15 17:30:38 2015 -0400 [DOCS] More docs
commit 89177965d031d84937753538b88ea5ebae2956b0 Author: Zachary Tong Date: Thu Oct 15 09:59:09 2015 -0400 Fix multi-stage rewrites to recursively find most appropriate descendant rewrite Previously, we chose the first rewrite that matched. But in situations where a query may rewrite several times, this won't build the tree correctly. Instead we need to recurse down all the rewrites until we find the most appropriate "leaf" rewrite. The implementation of this is kinda gross: we recursively call getRewrittenParentToken(), which does a linear scan over the rewriteMap and tries to find rewrites with a larger token value (since we know child tokens are always larger). Can almost certainly find a better way to do this...
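A rough, self-contained sketch of the descent that commit message describes; the map layout and method name here are assumptions, not the actual implementation:

```java
import java.util.HashMap;
import java.util.Map;

public class RewriteChainSketch {
    // Follow rewrite edges until no further rewrite exists, returning the
    // "leaf" token. Child tokens are assumed strictly larger than their
    // parents, mirroring the invariant noted in the commit message.
    static int resolveLeafRewrite(Map<Integer, Integer> rewriteMap, int token) {
        Integer child = rewriteMap.get(token);
        if (child != null && child > token) {
            return resolveLeafRewrite(rewriteMap, child);
        }
        return token;
    }

    public static void main(String[] args) {
        Map<Integer, Integer> rewrites = new HashMap<>();
        rewrites.put(0, 3); // query 0 rewrote into query 3
        rewrites.put(3, 7); // ...which rewrote again into query 7
        System.out.println(resolveLeafRewrite(rewrites, 0)); // prints 7
    }
}
```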
commit 0b4d782b5348e5d03fd26f7d91bc4a3fbcb7f6a5 Author: Zachary Tong Date: Wed Oct 14 19:30:06 2015 -0400 [Docs] Documentation checkpoint
commit 383636453f6610fcfef9070c21ae7ca11346793e Author: Zachary Tong Date: Wed Sep 16 16:02:22 2015 -0400 Comments
commit a81e8f31e681be16e89ceab9ba3c3e0a018f18ef Author: Zachary Tong Date: Wed Sep 16 15:48:49 2015 -0400 [TESTS] Ensure all tests use QUERY_THEN_FETCH, DFS does not profile
commit 1255c2d790d85fcb9cbb78bf2a53195138c6bc24 Author: Zachary Tong Date: Tue Sep 15 16:43:46 2015 -0400 Refactor rewrite handling to handle identical rewrites
commit 85b7ec82eb0b26a6fe87266b38f5f86f9ac0c44f Author: Zachary Tong Date: Tue Sep 15 08:51:14 2015 -0400 Don't update parent when a token is added as root -- Fixes NPE
commit 109d02bdbc49741a3b61e8624521669b0968b839 Author: Zachary Tong Date: Tue Sep 15 08:50:40 2015 -0400 Don't set the rewritten query if not profiling -- Fixes NPE
commit 233cf5e85f6f2c39ed0a2a33d7edd3bbd40856e8 Author: Zachary Tong Date: Mon Sep 14 18:04:51 2015 -0400 Update tests to new response format
commit a930b1fc19de3a329abc8ffddc6711c1246a4b15 Author: Zachary Tong Date: Mon Sep 14 18:03:58 2015 -0400 Fix serialization
commit 69afdd303660510c597df9bada5531b19d134f3d Author: Zachary Tong Date: Mon Sep 14 15:11:31 2015 -0400 Comments and cleanup
commit 64e7ca7f78187875378382ec5d5aa2462ff71df5 Author: Zachary Tong Date: Mon Sep 14 14:40:21 2015 -0400 Move timing into dedicated class, add proper rewrite integration
commit b44ff85ddbba0a080e65f2e7cc8c50d30e95df8e Author: Zachary Tong Date: Mon Sep 14 12:00:38 2015 -0400 Checkpoint - Refactoring to use a token-based dependency tree
commit 52cedd5266d6a87445c6a4cff3be8ff2087cd1b7 Author: Zachary Tong Date: Fri Sep 4 19:18:19 2015 -0400 Need to set context profiling flag before calling queryPhase.preProcess
commit c524670cb1ce29b4b3a531fa2bff0c403b756f46 Author: Adrien Grand Date: Fri Sep 4 18:00:37 2015 +0200 Reduce profiling overhead a bit. This removes hash-table lookups every time we start/stop a profiling clock.
commit 111444ff8418737082236492b37321fc96041e09 Author: Adrien Grand Date: Fri Sep 4 16:18:59 2015 +0200 Add profiling of two-phase iterators. This is useful for e.g. phrase queries or script filters, since they are typically consumed through their two-phase iterator instead of the scorer.
commit f275e690459e73211bc8494c6de595c0320f4c0b Author: Adrien Grand Date: Fri Sep 4 16:03:21 2015 +0200 Some more improvements. I changed profiling to disable bulk scoring, since it makes it impossible to know where time is spent. Also I removed profiling of operations that are always fast (e.g. normalization) and added nextDoc/advance.
commit 3c8dcd872744de8fd76ce13b6f18f36f8de44068 Author: Adrien Grand Date: Fri Sep 4 14:39:50 2015 +0200 Remove println.
commit d68304862fb38a3823aebed35a263bd9e2176c2f Author: Adrien Grand Date: Fri Sep 4 14:36:03 2015 +0200 Fix some test failures introduced by the rebase...
commit 04d53ca89fb34b7a21515d770c32aaffcc513b90 Author: Adrien Grand Date: Fri Sep 4 13:57:35 2015 +0200 Reconcile conflicting changes after rebase
commit fed03ec8e2989a0678685cd6c50a566cec42ea4f Author: Zachary Tong Date: Thu Aug 20 22:40:39 2015 -0400 Add Collectors to profile results Profile response element has now been rearranged so that everything is listed per-shard to facilitate grouping elements together.
The new `collector` element looks like this: ``` "profile": { "shards": [ { "shard_id": "keP4YFywSXWALCl4m4k24Q", "query": [...], "collector": [ { "name": "MultiCollector", "purpose": "search_multi", "time": "16.44504400ms", "relative_time": "100.0000000%", "children": [ { "name": "FilteredCollector", "purpose": "search_post_filter", "time": "4.556013000ms", "relative_time": "27.70447437%", "children": [ { "name": "SimpleTopScoreDocCollector", "purpose": "search_sorted", "time": "1.352166000ms", "relative_time": "8.222331299%", "children": [] } ] }, { "name": "BucketCollector: [[non_global_term, another_agg]]", "purpose": "aggregation", "time": "10.40379400ms", "relative_time": "63.26400829%", "children": [] }, ... ``` commit 1368b495c934be642c00f6cbf9fc875d7e6c07ff Author: Zachary Tong Date: Wed Aug 19 12:43:03 2015 -0400 Move InternalProfiler to profile package commit 53584de910db6d4a6bb374c9ebb954f204882996 Author: Zachary Tong Date: Tue Aug 18 18:34:58 2015 -0400 Only reconcile rewrite timing when rewritten query is different from original commit 9804c3b29d2107cd97f1c7e34d77171b62cb33d0 Author: Zachary Tong Date: Tue Aug 18 16:40:15 2015 -0400 Comments and cleanup commit 8e898cc7c59c0c1cc5ed576dfed8e3034ca0967f Author: Zachary Tong Date: Tue Aug 18 14:19:07 2015 -0400 [TESTS] Fix comparison test to ensure results sort identically commit f402a29001933eef29d5a62e81c8563f1c8d0969 Author: Zachary Tong Date: Tue Aug 18 14:17:59 2015 -0400 Add note about DFS being unable to profile commit d446e08d3bc91cd85b24fc908e2d82fc5739d598 Author: Zachary Tong Date: Tue Aug 18 14:17:23 2015 -0400 Implement some missing methods commit 13ca94fb86fb037a30d181b73d9296153a63d6e4 Author: Zachary Tong Date: Tue Aug 18 13:10:54 2015 -0400 [TESTS] Comments & cleanup commit c76c8c771fdeee807761c25938a642612a6ed8e7 Author: Zachary Tong Date: Tue Aug 18 13:06:08 2015 -0400 [TESTS] Fix profileMatchesRegular to handle NaN scores and nearlyEqual floats commit 7e7a10ecd26677b2239149468e24938ce5cc18e1 Author: Zachary Tong Date: Tue Aug 18 12:22:16 2015 -0400 Move nearlyEquals() utility function to shared location commit 842222900095df4b27ff3593dbb55a42549f2697 Author: Zachary Tong Date: Tue Aug 18 12:04:35 2015 -0400 Fixup rebase conflicts commit 674f162d7704dd2034b8361358decdefce1f76ce Author: Zachary Tong Date: Mon Aug 17 15:29:35 2015 -0400 [TESTS] Update match and bool tests commit 520380a85456d7137734aed0b06a740e18c9cdec Author: Zachary Tong Date: Mon Aug 17 15:28:09 2015 -0400 Make naming consistent re: plural commit b9221501d839bb24d6db575d08e9bee34043fc65 Author: Zachary Tong Date: Mon Aug 17 15:27:39 2015 -0400 Children need to be added to list after serialization commit 05fa51df940c332fbc140517ee56e849f2d40a72 Author: Zachary Tong Date: Mon Aug 17 15:22:41 2015 -0400 Re-enable bypass for non-profiled queries commit f132204d264af77a75bd26a02d4e251a19eb411d Author: Zachary Tong Date: Mon Aug 17 15:21:14 2015 -0400 Fix serialization of QuerySearchResult, InternalProfileResult commit 27b98fd475fc2e9508c91436ef30624bdbee54ba Author: Zachary Tong Date: Mon Aug 10 17:39:17 2015 -0400 Start to add back tests, refactor Java api commit bcfc9fefd49307045108408dc160774666510e85 Author: Zachary Tong Date: Tue Aug 4 17:08:10 2015 -0400 Checkpoint commit 26a530e0101ce252450eb23e746e48c2fd1bfcae Author: Zachary Tong Date: Tue Jul 14 13:30:32 2015 -0400 Add createWeight() checkpoint commit f0dd61de809c5c13682aa213c0be65972537a0df Author: Zachary Tong Date: Mon Jul 13 12:36:27 2015 -0400 checkpoint commit 
377ee8ce5729b8d388c4719913b48fae77a16686 Author: Zachary Tong Date: Wed Mar 18 10:45:01 2015 -0400 checkpoint --- .../action/search/SearchRequestBuilder.java | 8 + .../action/search/SearchResponse.java | 14 + .../percolator/PercolateContext.java | 9 +- .../elasticsearch/search/SearchService.java | 9 +- .../search/aggregations/AggregationPhase.java | 28 +- .../search/aggregations/BucketCollector.java | 6 + .../search/builder/SearchSourceBuilder.java | 40 +- .../controller/SearchPhaseController.java | 16 +- .../search/internal/ContextIndexSearcher.java | 57 +- .../search/internal/DefaultSearchContext.java | 14 +- .../internal/FilteredSearchContext.java | 9 +- .../internal/InternalSearchResponse.java | 44 +- .../search/internal/SearchContext.java | 8 +- .../internal/ShardSearchLocalRequest.java | 12 + .../search/internal/ShardSearchRequest.java | 11 + .../internal/ShardSearchTransportRequest.java | 10 + .../search/profile/CollectorResult.java | 156 +++++ .../profile/InternalProfileCollector.java | 135 ++++ .../profile/InternalProfileShardResults.java | 89 +++ .../search/profile/InternalProfileTree.java | 235 +++++++ .../search/profile/ProfileBreakdown.java | 113 ++++ .../search/profile/ProfileCollector.java | 94 +++ .../search/profile/ProfileResult.java | 165 +++++ .../search/profile/ProfileScorer.java | 158 +++++ .../search/profile/ProfileShardResult.java | 103 +++ .../search/profile/ProfileWeight.java | 97 +++ .../search/profile/Profiler.java | 130 ++++ .../search/profile/Profilers.java | 59 ++ .../search/query/QueryPhase.java | 68 +- .../search/query/QuerySearchResult.java | 41 ++ .../pipeline/moving/avg/MovAvgIT.java | 1 + .../search/profile/ProfileTests.java | 173 +++++ .../search/profile/QueryProfilerIT.java | 596 +++++++++++++++++ .../search/profile/RandomQueryGenerator.java | 266 ++++++++ .../test/hamcrest/DoubleMatcher.java | 45 ++ docs/reference/search.asciidoc | 2 + docs/reference/search/profile.asciidoc | 601 ++++++++++++++++++ .../elasticsearch/test/TestSearchContext.java | 12 +- 38 files changed, 3599 insertions(+), 35 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/Profiler.java create mode 100644 core/src/main/java/org/elasticsearch/search/profile/Profilers.java create mode 100644 core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java create mode 100644 core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java create mode 100644 
core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java create mode 100644 docs/reference/search/profile.asciidoc
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 442b0915e3b..52d45ec9407 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -473,6 +473,14 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, SearchResponse, SearchRequestBuilder> { + /** + * Should the query be profiled. Defaults to false + */ + public SearchRequestBuilder setProfile(boolean profile) { + sourceBuilder().profile(profile); + return this; + } + @Override public String toString() { if (request.source() != null) {
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 769e0978a71..e6681bf2b9f 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -32,9 +33,12 @@ import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; +import java.util.List; +import java.util.Map; import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; import static org.elasticsearch.search.internal.InternalSearchResponse.readInternalSearchResponse; @@ -160,6 +164,16 @@ public class SearchResponse extends ActionResponse implements StatusToXContent { this.scrollId = scrollId; } + /** + * If profiling was enabled, this returns an object containing the profile results from + * each shard.
If profiling was not enabled, this will return null + * + * @return The profile results or null + */ + public @Nullable Map<String, List<ProfileShardResult>> getProfileResults() { + return internalResponse.profile(); + } + static final class Fields { static final XContentBuilderString _SCROLL_ID = new XContentBuilderString("_scroll_id"); static final XContentBuilderString TOOK = new XContentBuilderString("took");
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 5b09b55f8dc..8df956f2cea 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -74,6 +73,8 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profiler; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -139,7 +140,7 @@ public class PercolateContext extends SearchContext { this.bigArrays = bigArrays.withCircuitBreaking(); this.querySearchResult = new QuerySearchResult(0, searchShardTarget); this.engineSearcher = indexShard.acquireSearcher("percolate"); - this.searcher = new ContextIndexSearcher(this, engineSearcher); + this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy()); this.scriptService = scriptService; this.numberOfShards = request.getNumberOfShards(); this.aliasFilter = aliasFilter; @@ -748,5 +749,7 @@ public class PercolateContext extends SearchContext { } @Override - public QueryCache getQueryCache() { return indexService.cache().query();} + public Profilers getProfilers() { + throw new UnsupportedOperationException(); + } }
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 00939a74cf8..29ad7ccf0a1 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectSet; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -86,6 +87,7 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.*; import org.elasticsearch.search.internal.SearchContext.Lifetime; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.*;
import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.threadpool.ThreadPool; @@ -552,7 +554,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher; - SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout); + DefaultSearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout); SearchContext.setCurrent(context); try { @@ -659,7 +661,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp } } - private void parseSource(SearchContext context, SearchSourceBuilder source) throws SearchContextException { + private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchContextException { // nothing to parse... if (source == null) { return; @@ -715,6 +717,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp if (source.minScore() != null) { context.minimumScore(source.minScore()); } + if (source.profile()) { + context.setProfilers(new Profilers(context.searcher())); + } context.timeoutInMillis(source.timeoutInMillis()); context.terminateAfter(source.terminateAfter()); if (source.aggregations() != null) {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 742f678f6ff..0681996e3ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Collector; import org.apache.lucene.search.Query; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; @@ -30,10 +31,13 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.profile.CollectorResult; +import org.elasticsearch.search.profile.InternalProfileCollector; import org.elasticsearch.search.query.QueryPhaseExecutionException; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -81,8 +85,13 @@ public class AggregationPhase implements SearchPhase { } context.aggregations().aggregators(aggregators); if (!collectors.isEmpty()) { - final BucketCollector collector = BucketCollector.wrap(collectors); - collector.preCollection(); + Collector collector = BucketCollector.wrap(collectors); + ((BucketCollector)collector).preCollection(); + if (context.getProfilers() != null) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_AGGREGATION, + // TODO: report on child
aggs as well + Collections.emptyList()); + } context.queryCollectors().put(AggregationPhase.class, collector); } } catch (IOException e) { @@ -116,6 +125,7 @@ public class AggregationPhase implements SearchPhase { BucketCollector globalsCollector = BucketCollector.wrap(globals); Query query = Queries.newMatchAllQuery(); Query searchFilter = context.searchFilter(context.types()); + if (searchFilter != null) { BooleanQuery filtered = new BooleanQuery.Builder() .add(query, Occur.MUST) @@ -124,8 +134,20 @@ public class AggregationPhase implements SearchPhase { query = filtered; } try { + final Collector collector; + if (context.getProfilers() == null) { + collector = globalsCollector; + } else { + InternalProfileCollector profileCollector = new InternalProfileCollector( + globalsCollector, CollectorResult.REASON_AGGREGATION_GLOBAL, + // TODO: report on sub collectors + Collections.emptyList()); + collector = profileCollector; + // start a new profile with this collector + context.getProfilers().addProfiler().setCollector(profileCollector); + } globalsCollector.preCollection(); - context.searcher().search(query, globalsCollector); + context.searcher().search(query, collector); } catch (Exception e) { throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e); } finally { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java b/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java index ee38e2b3610..c1c1bff1adb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.Collector; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.stream.StreamSupport; @@ -99,6 +100,11 @@ public abstract class BucketCollector implements Collector { } return false; } + + @Override + public String toString() { + return Arrays.toString(collectors); + } }; } } diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 465729ca714..3ea2d604b8a 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.elasticsearch.Version; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -91,6 +92,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField RESCORE_FIELD = new ParseField("rescore"); public static final ParseField STATS_FIELD = new ParseField("stats"); public static final ParseField EXT_FIELD = new ParseField("ext"); + public static final ParseField PROFILE_FIELD = new ParseField("profile"); private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder(); @@ -158,6 +160,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private BytesReference ext = null; + private boolean profile = false; + + /** * Constructs a new search source builder. 
*/ @@ -475,6 +480,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return this; } + /** + * Should the query be profiled. Defaults to false + */ + public SearchSourceBuilder profile(boolean profile) { + this.profile = profile; + return this; + } + + /** + * Return whether to profile query execution, or {@code null} if + * unspecified. + */ + public boolean profile() { + return profile; + } + /** * Gets the bytes representing the rescore builders for this request. */ @@ -723,6 +744,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.fieldNames = fieldNames; } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { builder.sort(parser.text()); + } else if (context.parseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) { + builder.profile = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -931,6 +954,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.field(EXPLAIN_FIELD.getPreferredName(), explain); } + if (profile) { + builder.field("profile", true); + } + if (fetchSourceContext != null) { builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext); } @@ -1212,6 +1239,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (in.readBoolean()) { builder.ext = in.readBytesReference(); } + if (in.getVersion().onOrAfter(Version.V_2_2_0)) { + builder.profile = in.readBoolean(); + } else { + builder.profile = false; + } return builder; } @@ -1325,13 +1357,16 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (hasExt) { out.writeBytesReference(ext); } + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + out.writeBoolean(profile); + } } @Override public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, - size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version); + size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); } @Override @@ -1364,6 +1399,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ && Objects.equals(terminateAfter, other.terminateAfter) && Objects.equals(timeoutInMillis, other.timeoutInMillis) && Objects.equals(trackScores, other.trackScores) - && Objects.equals(version, other.version); + && Objects.equals(version, other.version) + && Objects.equals(profile, other.profile); } } diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index f76527163cb..835e6e71425 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -43,7 +43,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import 
org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; @@ -52,9 +51,11 @@ import org.elasticsearch.search.fetch.FetchSearchResultProvider; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.profile.InternalProfileShardResults; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.profile.ProfileShardResult; import java.io.IOException; import java.util.ArrayList; @@ -410,6 +411,17 @@ public class SearchPhaseController extends AbstractComponent { } } + //Collect profile results + InternalProfileShardResults shardResults = null; + if (!queryResults.isEmpty() && firstResult.profileResults() != null) { + Map> profileResults = new HashMap<>(queryResults.size()); + for (AtomicArray.Entry entry : queryResults) { + String key = entry.value.queryResult().shardTarget().toString(); + profileResults.put(key, entry.value.queryResult().profileResults()); + } + shardResults = new InternalProfileShardResults(profileResults); + } + if (aggregations != null) { List pipelineAggregators = firstResult.pipelineAggregators(); if (pipelineAggregators != null) { @@ -427,7 +439,7 @@ public class SearchPhaseController extends AbstractComponent { InternalSearchHits searchHits = new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore); - return new InternalSearchResponse(searchHits, aggregations, suggest, timedOut, terminatedEarly); + return new InternalSearchResponse(searchHits, aggregations, suggest, shardResults, timedOut, terminatedEarly); } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 0a9b860edb7..a7bacb64d94 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -26,6 +26,9 @@ import org.apache.lucene.search.*; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; +import org.elasticsearch.search.profile.ProfileBreakdown; +import org.elasticsearch.search.profile.ProfileWeight; +import org.elasticsearch.search.profile.Profiler; import java.io.IOException; @@ -43,26 +46,44 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private final Engine.Searcher engineSearcher; - public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) { + // TODO revisit moving the profiler to inheritance or wrapping model in the future + private Profiler profiler; + + public ContextIndexSearcher(Engine.Searcher searcher, + QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) { super(searcher.reader()); in = searcher.searcher(); engineSearcher = searcher; setSimilarity(searcher.searcher().getSimilarity(true)); - setQueryCache(searchContext.getQueryCache()); - setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy()); + setQueryCache(queryCache); + setQueryCachingPolicy(queryCachingPolicy); } @Override public void close() { } + public void 
setProfiler(Profiler profiler) { + this.profiler = profiler; + } + public void setAggregatedDfs(AggregatedDfs aggregatedDfs) { this.aggregatedDfs = aggregatedDfs; } @Override public Query rewrite(Query original) throws IOException { - return in.rewrite(original); + if (profiler != null) { + profiler.startRewriteTime(); + } + + try { + return in.rewrite(original); + } finally { + if (profiler != null) { + profiler.stopAndAddRewriteTime(); + } + } } @Override @@ -72,8 +93,34 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { if (aggregatedDfs != null && needsScores) { // if scores are needed and we have dfs data then use it return super.createNormalizedWeight(query, needsScores); + } else if (profiler != null) { + // we need to use the createWeight method to insert the wrappers + return super.createNormalizedWeight(query, needsScores); + } else { + return in.createNormalizedWeight(query, needsScores); + } + } + + @Override + public Weight createWeight(Query query, boolean needsScores) throws IOException { + if (profiler != null) { + // createWeight() is called for each query in the tree, so we tell the queryProfiler + // each invocation so that it can build an internal representation of the query + // tree + ProfileBreakdown profile = profiler.getQueryBreakdown(query); + profile.startTime(ProfileBreakdown.TimingType.CREATE_WEIGHT); + final Weight weight; + try { + weight = super.createWeight(query, needsScores); + } finally { + profile.stopAndRecordTime(); + profiler.pollLastQuery(); + } + return new ProfileWeight(query, weight, profile); + } else { + // needs to be 'super', not 'in' in order to use aggregated DFS + return super.createWeight(query, needsScores); } - return in.createNormalizedWeight(query, needsScores); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 1174fcdd8a9..2d3f6590629 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -58,6 +58,8 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.query.QueryPhaseExecutionException; +import org.elasticsearch.search.profile.Profiler; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -129,10 +131,10 @@ public class DefaultSearchContext extends SearchContext { private List rescore; private SearchLookup searchLookup; private volatile long keepAlive; - private ScoreDoc lastEmittedDoc; private final long originNanoTime = System.nanoTime(); private volatile long lastAccessTime = -1; private InnerHitsContext innerHitsContext; + private Profilers profilers; private final Map subPhaseContexts = new HashMap<>(); private final Map, Collector> queryCollectors = new HashMap<>(); @@ -158,7 +160,7 @@ public class DefaultSearchContext extends SearchContext { this.fetchResult = new FetchSearchResult(id, shardTarget); this.indexShard = indexShard; this.indexService = indexService; - this.searcher = new ContextIndexSearcher(this, engineSearcher); + this.searcher = new ContextIndexSearcher(engineSearcher, 
indexService.cache().query(), indexShard.getQueryCachingPolicy()); this.timeEstimateCounter = timeEstimateCounter; this.timeoutInMillis = timeout.millis(); } @@ -724,5 +726,11 @@ public class DefaultSearchContext extends SearchContext { } @Override - public QueryCache getQueryCache() { return indexService.cache().query();} + public Profilers getProfilers() { + return profilers; + } + + public void setProfilers(Profilers profilers) { + this.profilers = profilers; + } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 7225c7b32bd..1f04d013401 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -49,6 +48,7 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -517,8 +517,11 @@ public abstract class FilteredSearchContext extends SearchContext { } @Override - public Map, Collector> queryCollectors() { return in.queryCollectors();} + public Profilers getProfilers() { + return in.getProfilers(); + } @Override - public QueryCache getQueryCache() { return in.getQueryCache();} + public Map, Collector> queryCollectors() { return in.queryCollectors();} + } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index 7b73772f9da..b8255e0bb52 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -28,9 +28,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.profile.InternalProfileShardResults; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.search.internal.InternalSearchHits.readSearchHits; @@ -40,7 +45,7 @@ import static org.elasticsearch.search.internal.InternalSearchHits.readSearchHit public class InternalSearchResponse implements Streamable, ToXContent { public static InternalSearchResponse empty() { - return new InternalSearchResponse(InternalSearchHits.empty(), null, null, false, null); + return new 
InternalSearchResponse(InternalSearchHits.empty(), null, null, null, false, null); } private InternalSearchHits hits; @@ -49,6 +54,8 @@ public class InternalSearchResponse implements Streamable, ToXContent { private Suggest suggest; + private InternalProfileShardResults profileResults; + private boolean timedOut; private Boolean terminatedEarly = null; @@ -56,10 +63,12 @@ public class InternalSearchResponse implements Streamable, ToXContent { private InternalSearchResponse() { } - public InternalSearchResponse(InternalSearchHits hits, InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly) { + public InternalSearchResponse(InternalSearchHits hits, InternalAggregations aggregations, Suggest suggest, + InternalProfileShardResults profileResults, boolean timedOut, Boolean terminatedEarly) { this.hits = hits; this.aggregations = aggregations; this.suggest = suggest; + this.profileResults = profileResults; this.timedOut = timedOut; this.terminatedEarly = terminatedEarly; } @@ -84,6 +93,19 @@ public class InternalSearchResponse implements Streamable, ToXContent { return suggest; } + /** + * Returns the profile results for this search response (including all shards). + * An empty map is returned if profiling was not enabled + * + * @return Profile results + */ + public Map> profile() { + if (profileResults == null) { + return Collections.emptyMap(); + } + return profileResults.getShardResults(); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { hits.toXContent(builder, params); @@ -93,6 +115,9 @@ public class InternalSearchResponse implements Streamable, ToXContent { if (suggest != null) { suggest.toXContent(builder, params); } + if (profileResults != null) { + profileResults.toXContent(builder, params); + } return builder; } @@ -114,6 +139,12 @@ public class InternalSearchResponse implements Streamable, ToXContent { timedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); + + if (in.getVersion().onOrAfter(Version.V_2_2_0) && in.readBoolean()) { + profileResults = new InternalProfileShardResults(in); + } else { + profileResults = null; + } } @Override @@ -134,5 +165,14 @@ public class InternalSearchResponse implements Streamable, ToXContent { out.writeBoolean(timedOut); out.writeOptionalBoolean(terminatedEarly); + + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + if (profileResults == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + profileResults.writeTo(out); + } + } } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 0f61b2bc6a3..4e4e9dd5dd7 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -56,6 +55,7 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import 
org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -303,6 +303,11 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple public abstract FetchSearchResult fetchResult(); + /** + * Return a handle over the profilers for the current search request, or {@code null} if profiling is not enabled. + */ + public abstract Profilers getProfilers(); + /** * Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object * is function of the provided {@link Lifetime}. @@ -367,5 +372,4 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple CONTEXT } - public abstract QueryCache getQueryCache(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 47791aeddfa..9d15dfd5790 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -71,6 +71,8 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S private Boolean requestCache; private long nowInMillis; + private boolean profile; + ShardSearchLocalRequest() { } @@ -165,6 +167,16 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S return scroll; } + @Override + public void setProfile(boolean profile) { + this.profile = profile; + } + + @Override + public boolean isProfile() { + return profile; + } + @SuppressWarnings("unchecked") protected void innerReadFrom(StreamInput in) throws IOException { index = in.readString(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index fb631b08270..b1730b6a14e 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -59,6 +59,17 @@ public interface ShardSearchRequest extends HasContextAndHeaders { Scroll scroll(); + /** + * Sets if this shard search needs to be profiled or not + * @param profile True if the shard should be profiled + */ + void setProfile(boolean profile); + + /** + * Returns true if this shard search is being profiled or not + */ + boolean isProfile(); + /** * Returns the cache key for this shard search request, based on its content */ diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index 279d9d6bd20..0f9c0ced411 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -150,4 +150,14 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha public BytesReference cacheKey() throws IOException { return shardSearchLocalRequest.cacheKey(); } + + @Override + public void setProfile(boolean profile) { + shardSearchLocalRequest.setProfile(profile); + } + + @Override + 
public boolean isProfile() { + return shardSearchLocalRequest.isProfile(); + } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java new file mode 100644 index 00000000000..4949c6388d2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +/** + * Public interface and serialization container for profiled timings of the + * Collectors used in the search. 
Children CollectorResult's may be + * embedded inside of a parent CollectorResult + */ +public class CollectorResult implements ToXContent, Writeable { + + public static final String REASON_SEARCH_COUNT = "search_count"; + public static final String REASON_SEARCH_TOP_HITS = "search_top_hits"; + public static final String REASON_SEARCH_TERMINATE_AFTER_COUNT = "search_terminate_after_count"; + public static final String REASON_SEARCH_POST_FILTER = "search_post_filter"; + public static final String REASON_SEARCH_MIN_SCORE = "search_min_score"; + public static final String REASON_SEARCH_MULTI = "search_multi"; + public static final String REASON_SEARCH_TIMEOUT = "search_timeout"; + public static final String REASON_AGGREGATION = "aggregation"; + public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global"; + + private static final ParseField NAME = new ParseField("name"); + private static final ParseField REASON = new ParseField("reason"); + private static final ParseField TIME = new ParseField("time"); + private static final ParseField CHILDREN = new ParseField("children"); + + /** + * A more friendly representation of the Collector's class name + */ + private final String collectorName; + + /** + * A "hint" to help provide some context about this Collector + */ + private final String reason; + + /** + * The total elapsed time for this Collector + */ + private final Long time; + + /** + * A list of children collectors "embedded" inside this collector + */ + private List children; + + public CollectorResult(String collectorName, String reason, Long time, List children) { + this.collectorName = collectorName; + this.reason = reason; + this.time = time; + this.children = children; + } + + public CollectorResult(StreamInput in) throws IOException { + this.collectorName = in.readString(); + this.reason = in.readString(); + this.time = in.readLong(); + int size = in.readVInt(); + this.children = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + CollectorResult child = new CollectorResult(in); + this.children.add(child); + } + } + + /** + * @return the profiled time for this collector (inclusive of children) + */ + public long getTime() { + return this.time; + } + + /** + * @return a human readable "hint" about what this collector was used for + */ + public String getReason() { + return this.reason; + } + + /** + * @return the lucene class name of the collector + */ + public String getName() { + return this.collectorName; + } + + /** + * @return a list of children collectors + */ + public List getProfiledChildren() { + return children; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder = builder.startObject() + .field(NAME.getPreferredName(), toString()) + .field(REASON.getPreferredName(), reason) + .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0))); + + if (!children.isEmpty()) { + builder = builder.startArray(CHILDREN.getPreferredName()); + for (CollectorResult child : children) { + builder = child.toXContent(builder, params); + } + builder = builder.endArray(); + } + builder = builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(collectorName); + out.writeString(reason); + out.writeLong(time); + out.writeVInt(children.size()); + for (CollectorResult child : children) { + child.writeTo(out); + } + } + + @Override + public Object readFrom(StreamInput in) 
throws IOException { + return new CollectorResult(in); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java new file mode 100644 index 00000000000..132731f37c6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * This class wraps a Lucene Collector and times the execution of: + * - setScorer() + * - collect() + * - doSetNextReader() + * - needsScores() + * + * InternalProfiler facilitates the linking of the the Collector graph + */ +public class InternalProfileCollector implements Collector { + + /** + * A more friendly representation of the Collector's class name + */ + private final String collectorName; + + /** + * A "hint" to help provide some context about this Collector + */ + private final String reason; + + /** The wrapped collector */ + private final ProfileCollector collector; + + /** + * A list of "embedded" children collectors + */ + private final List children; + + public InternalProfileCollector(Collector collector, String reason, List children) { + this.collector = new ProfileCollector(collector); + this.reason = reason; + this.collectorName = deriveCollectorName(collector); + this.children = children; + } + + /** + * @return the profiled time for this collector (inclusive of children) + */ + public long getTime() { + return collector.getTime(); + } + + /** + * @return a human readable "hint" about what this collector was used for + */ + public String getReason() { + return this.reason; + } + + /** + * @return the lucene class name of the collector + */ + public String getName() { + return this.collectorName; + } + + /** + * Creates a human-friendly representation of the Collector name. + * + * Bucket Collectors use the aggregation name in their toString() method, + * which makes the profiled output a bit nicer. 
+ * + * @param c The Collector to derive a name from + * @return A (hopefully) prettier name + */ + private String deriveCollectorName(Collector c) { + String s = c.getClass().getSimpleName(); + + // MutiCollector which wraps multiple BucketCollectors is generated + // via an anonymous class, so this corrects the lack of a name by + // asking the enclosingClass + if (s.equals("")) { + s = c.getClass().getEnclosingClass().getSimpleName(); + } + + // Aggregation collector toString()'s include the user-defined agg name + if (reason.equals(CollectorResult.REASON_AGGREGATION) || reason.equals(CollectorResult.REASON_AGGREGATION_GLOBAL)) { + s += ": [" + c.toString() + "]"; + } + return s; + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + return collector.getLeafCollector(context); + } + + @Override + public boolean needsScores() { + return collector.needsScores(); + } + + public CollectorResult getCollectorTree() { + return InternalProfileCollector.doGetCollectorTree(this); + } + + private static CollectorResult doGetCollectorTree(InternalProfileCollector collector) { + List childResults = new ArrayList<>(collector.children.size()); + for (InternalProfileCollector child : collector.children) { + CollectorResult result = doGetCollectorTree(child); + childResults.add(result); + } + return new CollectorResult(collector.getName(), collector.getReason(), collector.getTime(), childResults); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java new file mode 100644 index 00000000000..2ab3b632119 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java @@ -0,0 +1,89 @@ +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; + +/** + * A container class to hold all the profile results across all shards. 
Internally + * holds a map of shard ID -> Profiled results + */ +public final class InternalProfileShardResults implements Writeable, ToXContent{ + + private Map> shardResults; + + public InternalProfileShardResults(Map> shardResults) { + Map> transformed = + shardResults.entrySet() + .stream() + .collect(Collectors.toMap( + Map.Entry::getKey, + e -> Collections.unmodifiableList(e.getValue())) + ); + this.shardResults = Collections.unmodifiableMap(transformed); + } + + public InternalProfileShardResults(StreamInput in) throws IOException { + int size = in.readInt(); + shardResults = new HashMap<>(size); + + for (int i = 0; i < size; i++) { + String key = in.readString(); + int shardResultsSize = in.readInt(); + + List shardResult = new ArrayList<>(shardResultsSize); + + for (int j = 0; j < shardResultsSize; j++) { + ProfileShardResult result = new ProfileShardResult(in); + shardResult.add(result); + } + shardResults.put(key, shardResult); + } + } + + public Map> getShardResults() { + return this.shardResults; + } + + @Override + public InternalProfileShardResults readFrom(StreamInput in) throws IOException { + return new InternalProfileShardResults(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(shardResults.size()); + for (Map.Entry> entry : shardResults.entrySet()) { + out.writeString(entry.getKey()); + out.writeInt(entry.getValue().size()); + + for (ProfileShardResult result : entry.getValue()) { + result.writeTo(out); + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("profile").startArray("shards"); + + for (Map.Entry> entry : shardResults.entrySet()) { + builder.startObject().field("id",entry.getKey()).startArray("searches"); + for (ProfileShardResult result : entry.getValue()) { + builder.startObject(); + result.toXContent(builder, params); + builder.endObject(); + } + builder.endArray().endObject(); + } + + builder.endArray().endObject(); + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java new file mode 100644 index 00000000000..4bc8a85a781 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java @@ -0,0 +1,235 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.search.Query; + +import java.util.*; +import java.util.concurrent.LinkedBlockingDeque; + +/** + * This class tracks the dependency tree for queries (scoring and rewriting) and + * generates {@link ProfileBreakdown} for each node in the tree. 
It also finalizes the tree + * and returns a list of {@link ProfileResult} that can be serialized back to the client + */ +final class InternalProfileTree { + + private ArrayList timings; + + /** Maps the Query to it's list of children. This is basically the dependency tree */ + private ArrayList> tree; + + /** A list of the original queries, keyed by index position */ + private ArrayList queries; + + /** A list of top-level "roots". Each root can have its own tree of profiles */ + private ArrayList roots; + + /** Rewrite time */ + private long rewriteTime; + private long rewriteScratch; + + /** A temporary stack used to record where we are in the dependency tree. Only used by scoring queries */ + private Deque stack; + + private int currentToken = 0; + + public InternalProfileTree() { + timings = new ArrayList<>(10); + stack = new LinkedBlockingDeque<>(10); + tree = new ArrayList<>(10); + queries = new ArrayList<>(10); + roots = new ArrayList<>(10); + } + + /** + * Returns a {@link ProfileBreakdown} for a scoring query. Scoring queries (e.g. those + * that are past the rewrite phase and are now being wrapped by createWeight() ) follow + * a recursive progression. We can track the dependency tree by a simple stack + * + * The only hiccup is that the first scoring query will be identical to the last rewritten + * query, so we need to take special care to fix that + * + * @param query The scoring query we wish to profile + * @return A ProfileBreakdown for this query + */ + public ProfileBreakdown getQueryBreakdown(Query query) { + int token = currentToken; + + boolean stackEmpty = stack.isEmpty(); + + // If the stack is empty, we are a new root query + if (stackEmpty) { + + // We couldn't find a rewritten query to attach to, so just add it as a + // top-level root. This is just a precaution: it really shouldn't happen. + // We would only get here if a top-level query that never rewrites for some reason. + roots.add(token); + + // Increment the token since we are adding a new node, but notably, do not + // updateParent() because this was added as a root + currentToken += 1; + stack.add(token); + + return addDependencyNode(query, token); + } + + updateParent(token); + + // Increment the token since we are adding a new node + currentToken += 1; + stack.add(token); + + return addDependencyNode(query, token); + } + + /** + * Begin timing a query for a specific Timing context + */ + public void startRewriteTime() { + assert rewriteScratch == 0; + rewriteScratch = System.nanoTime(); + } + + /** + * Halt the timing process and add the elapsed rewriting time. + * startRewriteTime() must be called for a particular context prior to calling + * stopAndAddRewriteTime(), otherwise the elapsed time will be negative and + * nonsensical + * + * @return The elapsed time + */ + public long stopAndAddRewriteTime() { + long time = Math.max(1, System.nanoTime() - rewriteScratch); + rewriteTime += time; + rewriteScratch = 0; + return time; + } + + /** + * Helper method to add a new node to the dependency tree. 
+ * + * Initializes a new list in the dependency tree, saves the query and + * generates a new {@link ProfileBreakdown} to track the timings + * of this query + * + * @param query The query to profile + * @param token The assigned token for this query + * @return A ProfileBreakdown to profile this query + */ + private ProfileBreakdown addDependencyNode(Query query, int token) { + + // Add a new slot in the dependency tree + tree.add(new ArrayList<>(5)); + + // Save our query for lookup later + queries.add(query); + + ProfileBreakdown queryTimings = new ProfileBreakdown(); + timings.add(token, queryTimings); + return queryTimings; + } + + /** + * Removes the last (e.g. most recent) value on the stack + */ + public void pollLast() { + stack.pollLast(); + } + + /** + * After the query has been run and profiled, we need to merge the flat timing map + * with the dependency graph to build a data structure that mirrors the original + * query tree + * + * @return a hierarchical representation of the profiled query tree + */ + public List getQueryTree() { + ArrayList results = new ArrayList<>(5); + for (Integer root : roots) { + results.add(doGetQueryTree(root)); + } + return results; + } + + /** + * Recursive helper to finalize a node in the dependency tree + * @param token The node we are currently finalizing + * @return A hierarchical representation of the tree inclusive of children at this level + */ + private ProfileResult doGetQueryTree(int token) { + Query query = queries.get(token); + ProfileBreakdown breakdown = timings.get(token); + Map timings = breakdown.toTimingMap(); + List children = tree.get(token); + List childrenProfileResults = Collections.emptyList(); + + if (children != null) { + childrenProfileResults = new ArrayList<>(children.size()); + for (Integer child : children) { + ProfileResult childNode = doGetQueryTree(child); + childrenProfileResults.add(childNode); + } + } + + // TODO this would be better done bottom-up instead of top-down to avoid + // calculating the same times over and over...but worth the effort? 
+ long nodeTime = getNodeTime(timings, childrenProfileResults); + String queryDescription = query.getClass().getSimpleName(); + String luceneName = query.toString(); + return new ProfileResult(queryDescription, luceneName, timings, childrenProfileResults, nodeTime); + } + + public long getRewriteTime() { + return rewriteTime; + } + + /** + * Internal helper to add a child to the current parent node + * + * @param childToken The child to add to the current parent + */ + private void updateParent(int childToken) { + Integer parent = stack.peekLast(); + ArrayList parentNode = tree.get(parent); + parentNode.add(childToken); + tree.set(parent, parentNode); + } + + /** + * Internal helper to calculate the time of a node, inclusive of children + * + * @param timings A map of breakdown timing for the node + * @param children All children profile results at this node + * @return The total time at this node, inclusive of children + */ + private static long getNodeTime(Map timings, List children) { + long nodeTime = 0; + for (long time : timings.values()) { + nodeTime += time; + } + + // Then add up our children + for (ProfileResult child : children) { + nodeTime += getNodeTime(child.getTimeBreakdown(), child.getProfiledChildren()); + } + return nodeTime; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java new file mode 100644 index 00000000000..55ad77b6937 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * A record of timings for the various operations that may happen during query execution. + * A node's time may be composed of several internal attributes (rewriting, weighting, + * scoring, etc). + */ +public final class ProfileBreakdown { + + /** Enumeration of all supported timing types. */ + public enum TimingType { + CREATE_WEIGHT, + BUILD_SCORER, + NEXT_DOC, + ADVANCE, + MATCH, + SCORE; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + /** + * The accumulated timings for this query node + */ + private final long[] timings; + + /** Scrach to store the current timing type. */ + private TimingType currentTimingType; + + /** + * The temporary scratch space for holding start-times + */ + private long scratch; + + /** Sole constructor. 
*/ + public ProfileBreakdown() { + timings = new long[TimingType.values().length]; + } + + /** + * Begin timing a query for a specific Timing context + * @param timing The timing context being profiled + */ + public void startTime(TimingType timing) { + assert currentTimingType == null; + assert scratch == 0; + currentTimingType = timing; + scratch = System.nanoTime(); + } + + /** + * Halt the timing process and save the elapsed time. + * startTime() must be called for a particular context prior to calling + * stopAndRecordTime(), otherwise the elapsed time will be negative and + * nonsensical + * + * @return The elapsed time + */ + public long stopAndRecordTime() { + long time = Math.max(1, System.nanoTime() - scratch); + timings[currentTimingType.ordinal()] += time; + currentTimingType = null; + scratch = 0L; + return time; + } + + /** Convert this record to a map from {@link TimingType} to times. */ + public Map toTimingMap() { + Map map = new HashMap<>(); + for (TimingType timingType : TimingType.values()) { + map.put(timingType.toString(), timings[timingType.ordinal()]); + } + return Collections.unmodifiableMap(map); + } + + /** + * Add other's timings into this breakdown + * @param other Another Breakdown to merge with this one + */ + public void merge(ProfileBreakdown other) { + assert(timings.length == other.timings.length); + for (int i = 0; i < timings.length; ++i) { + timings[i] += other.timings[i]; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java new file mode 100644 index 00000000000..7d7538c9117 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.FilterCollector; +import org.apache.lucene.search.FilterLeafCollector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorer; + +import java.io.IOException; + +/** A collector that profiles how much time is spent calling it. */ +final class ProfileCollector extends FilterCollector { + + private long time; + + /** Sole constructor. */ + public ProfileCollector(Collector in) { + super(in); + } + + /** Return the wrapped collector. 
*/ + public Collector getDelegate() { + return in; + } + + @Override + public boolean needsScores() { + final long start = System.nanoTime(); + try { + return super.needsScores(); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + final long start = System.nanoTime(); + final LeafCollector inLeafCollector; + try { + inLeafCollector = super.getLeafCollector(context); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + return new FilterLeafCollector(inLeafCollector) { + + @Override + public void collect(int doc) throws IOException { + final long start = System.nanoTime(); + try { + super.collect(doc); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + final long start = System.nanoTime(); + try { + super.setScorer(scorer); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + }; + } + + /** Return the total time spent on this collector. */ + public long getTime() { + return time; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java new file mode 100644 index 00000000000..4c8752fdaf2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +/** + * This class is the internal representation of a profiled Query, corresponding + * to a single node in the query tree. 
It is built after the query has finished executing + * and is merely a structured representation, rather than the entity that collects the timing + * profile (see InternalProfiler for that) + * + * Each InternalProfileResult has a List of InternalProfileResults, which will contain + * "children" queries if applicable + */ +final class ProfileResult implements Writeable, ToXContent { + + private static final ParseField QUERY_TYPE = new ParseField("query_type"); + private static final ParseField LUCENE_DESCRIPTION = new ParseField("lucene"); + private static final ParseField NODE_TIME = new ParseField("time"); + private static final ParseField CHILDREN = new ParseField("children"); + private static final ParseField BREAKDOWN = new ParseField("breakdown"); + + private final String queryType; + private final String luceneDescription; + private final Map timings; + private final long nodeTime; + private final List children; + + public ProfileResult(String queryType, String luceneDescription, Map timings, List children, long nodeTime) { + this.queryType = queryType; + this.luceneDescription = luceneDescription; + this.timings = timings; + this.children = children; + this.nodeTime = nodeTime; + } + + public ProfileResult(StreamInput in) throws IOException{ + this.queryType = in.readString(); + this.luceneDescription = in.readString(); + this.nodeTime = in.readLong(); + + int timingsSize = in.readVInt(); + this.timings = new HashMap<>(timingsSize); + for (int i = 0; i < timingsSize; ++i) { + timings.put(in.readString(), in.readLong()); + } + + int size = in.readVInt(); + this.children = new ArrayList<>(size); + + for (int i = 0; i < size; i++) { + children.add(new ProfileResult(in)); + } + } + + /** + * Retrieve the lucene description of this query (e.g. the "explain" text) + */ + public String getLuceneDescription() { + return luceneDescription; + } + + /** + * Retrieve the name of the query (e.g. "TermQuery") + */ + public String getQueryName() { + return queryType; + } + + /** + * Returns the timing breakdown for this particular query node + */ + public Map getTimeBreakdown() { + return Collections.unmodifiableMap(timings); + } + + /** + * Returns the total time (inclusive of children) for this query node. 
+ * + * @return elapsed time in nanoseconds + */ + public long getTime() { + return nodeTime; + } + + /** + * Returns a list of all profiled children queries + */ + public List getProfiledChildren() { + return Collections.unmodifiableList(children); + } + + @Override + public ProfileResult readFrom(StreamInput in) throws IOException { + return new ProfileResult(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(queryType); + out.writeString(luceneDescription); + out.writeLong(nodeTime); // not Vlong because can be negative + out.writeVInt(timings.size()); + for (Map.Entry entry : timings.entrySet()) { + out.writeString(entry.getKey()); + out.writeLong(entry.getValue()); + } + out.writeVInt(children.size()); + for (ProfileResult child : children) { + child.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder = builder.startObject() + .field(QUERY_TYPE.getPreferredName(), queryType) + .field(LUCENE_DESCRIPTION.getPreferredName(), luceneDescription) + .field(NODE_TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double)(getTime() / 1000000.0))) + .field(BREAKDOWN.getPreferredName(), timings); + + if (!children.isEmpty()) { + builder = builder.startArray(CHILDREN.getPreferredName()); + for (ProfileResult child : children) { + builder = child.toXContent(builder, params); + } + builder = builder.endArray(); + } + + builder = builder.endObject(); + return builder; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java new file mode 100644 index 00000000000..b0dc6f2cd4e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; + +import java.io.IOException; +import java.util.Collection; + +/** + * {@link Scorer} wrapper that will compute how much time is spent on moving + * the iterator, confirming matches and computing scores. 
+ */ +final class ProfileScorer extends Scorer { + + private final Scorer scorer; + private ProfileWeight profileWeight; + private final ProfileBreakdown profile; + + ProfileScorer(ProfileWeight w, Scorer scorer, ProfileBreakdown profile) throws IOException { + super(w); + this.scorer = scorer; + this.profileWeight = w; + this.profile = profile; + } + + @Override + public int docID() { + return scorer.docID(); + } + + @Override + public int advance(int target) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.ADVANCE); + try { + return scorer.advance(target); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int nextDoc() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); + try { + return scorer.nextDoc(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public float score() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.SCORE); + try { + return scorer.score(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int freq() throws IOException { + return scorer.freq(); + } + + @Override + public long cost() { + return scorer.cost(); + } + + @Override + public Weight getWeight() { + return profileWeight; + } + + @Override + public Collection getChildren() { + return scorer.getChildren(); + } + + @Override + public TwoPhaseIterator asTwoPhaseIterator() { + final TwoPhaseIterator in = scorer.asTwoPhaseIterator(); + if (in == null) { + return null; + } + final DocIdSetIterator inApproximation = in.approximation(); + final DocIdSetIterator approximation = new DocIdSetIterator() { + + @Override + public int advance(int target) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.ADVANCE); + try { + return inApproximation.advance(target); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int nextDoc() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); + try { + return inApproximation.nextDoc(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int docID() { + return inApproximation.docID(); + } + + @Override + public long cost() { + return inApproximation.cost(); + } + }; + return new TwoPhaseIterator(approximation) { + @Override + public boolean matches() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.MATCH); + try { + return in.matches(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public float matchCost() { + return in.matchCost(); + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java new file mode 100644 index 00000000000..6e005babb3c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.*; + +/** + * A container class to hold the profile results for a single shard in the request. + * Contains a list of query profiles, a collector tree and a total rewrite tree. + */ +public final class ProfileShardResult implements Writeable, ToXContent { + + private final List profileResults; + + private final CollectorResult profileCollector; + + private final long rewriteTime; + + public ProfileShardResult(List profileResults, long rewriteTime, + CollectorResult profileCollector) { + assert(profileCollector != null); + this.profileResults = profileResults; + this.profileCollector = profileCollector; + this.rewriteTime = rewriteTime; + } + + public ProfileShardResult(StreamInput in) throws IOException { + int profileSize = in.readVInt(); + profileResults = new ArrayList<>(profileSize); + for (int j = 0; j < profileSize; j++) { + profileResults.add(new ProfileResult(in)); + } + + profileCollector = new CollectorResult(in); + rewriteTime = in.readLong(); + } + + public List getQueryResults() { + return Collections.unmodifiableList(profileResults); + } + + public long getRewriteTime() { + return rewriteTime; + } + + public CollectorResult getCollectorResult() { + return profileCollector; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("query"); + for (ProfileResult p : profileResults) { + p.toXContent(builder, params); + } + builder.endArray(); + builder.field("rewrite_time", rewriteTime); + builder.startArray("collector"); + profileCollector.toXContent(builder, params); + builder.endArray(); + return builder; + } + + @Override + public ProfileShardResult readFrom(StreamInput in) throws IOException { + return new ProfileShardResult(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(profileResults.size()); + for (ProfileResult p : profileResults) { + p.writeTo(out); + } + profileCollector.writeTo(out); + out.writeLong(rewriteTime); + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java new file mode 100644 index 00000000000..1ce5cd721fe --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; + +import java.io.IOException; +import java.util.Set; + +/** + * Weight wrapper that will compute how much time it takes to build the + * {@link Scorer} and then return a {@link Scorer} that is wrapped in + * order to compute timings as well. + */ +public final class ProfileWeight extends Weight { + + private final Weight subQueryWeight; + private final ProfileBreakdown profile; + + public ProfileWeight(Query query, Weight subQueryWeight, ProfileBreakdown profile) throws IOException { + super(query); + this.subQueryWeight = subQueryWeight; + this.profile = profile; + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.BUILD_SCORER); + final Scorer subQueryScorer; + try { + subQueryScorer = subQueryWeight.scorer(context); + } finally { + profile.stopAndRecordTime(); + } + if (subQueryScorer == null) { + return null; + } + + return new ProfileScorer(this, subQueryScorer, profile); + } + + @Override + public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(context); + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + return subQueryWeight.explain(context, doc); + } + + @Override + public float getValueForNormalization() throws IOException { + return subQueryWeight.getValueForNormalization(); + } + + @Override + public void normalize(float norm, float topLevelBoost) { + subQueryWeight.normalize(norm, topLevelBoost); + } + + @Override + public void extractTerms(Set set) { + subQueryWeight.extractTerms(set); + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profiler.java b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java new file mode 100644 index 00000000000..bf0c9ec01b6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.search.Query; + +import java.util.*; + +/** + * This class acts as a thread-local storage for profiling a query. It also + * builds a representation of the query tree which is built constructed + * "online" as the weights are wrapped by ContextIndexSearcher. This allows us + * to know the relationship between nodes in tree without explicitly + * walking the tree or pre-wrapping everything + * + * A Profiler is associated with every Search, not per Search-Request. E.g. a + * request may execute two searches (query + global agg). A Profiler just + * represents one of those + */ +public final class Profiler { + + private final InternalProfileTree queryTree = new InternalProfileTree(); + + /** + * The root Collector used in the search + */ + private InternalProfileCollector collector; + + public Profiler() {} + + /** Set the collector that is associated with this profiler. */ + public void setCollector(InternalProfileCollector collector) { + if (this.collector != null) { + throw new IllegalStateException("The collector can only be set once."); + } + this.collector = Objects.requireNonNull(collector); + } + + /** + * Get the {@link ProfileBreakdown} for the given query, potentially creating it if it did not exist. + * This should only be used for queries that will be undergoing scoring. Do not use it to profile the + * rewriting phase + */ + public ProfileBreakdown getQueryBreakdown(Query query) { + return queryTree.getQueryBreakdown(query); + } + + /** + * Begin timing the rewrite phase of a request. All rewrites are accumulated together into a + * single metric + */ + public void startRewriteTime() { + queryTree.startRewriteTime(); + } + + /** + * Stop recording the current rewrite and add it's time to the total tally, returning the + * cumulative time so far. + * + * @return cumulative rewrite time + */ + public long stopAndAddRewriteTime() { + return queryTree.stopAndAddRewriteTime(); + } + + /** + * Removes the last (e.g. most recent) query on the stack. This should only be called for scoring + * queries, not rewritten queries + */ + public void pollLastQuery() { + queryTree.pollLast(); + } + + /** + * @return a hierarchical representation of the profiled query tree + */ + public List getQueryTree() { + return queryTree.getQueryTree(); + } + + /** + * @return total time taken to rewrite all queries in this profile + */ + public long getRewriteTime() { + return queryTree.getRewriteTime(); + } + + /** + * Return the current root Collector for this search + */ + public CollectorResult getCollector() { + return collector.getCollectorTree(); + } + + /** + * Helper method to convert Profiler into InternalProfileShardResults, which can be + * serialized to other nodes, emitted as JSON, etc. 
+ * + * @param profilers A list of Profilers to convert into InternalProfileShardResults + * @return A list of corresponding InternalProfileShardResults + */ + public static List buildShardResults(List profilers) { + List results = new ArrayList<>(profilers.size()); + for (Profiler profiler : profilers) { + ProfileShardResult result = new ProfileShardResult( + profiler.getQueryTree(), profiler.getRewriteTime(), profiler.getCollector()); + results.add(result); + } + return results; + } + + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profilers.java b/core/src/main/java/org/elasticsearch/search/profile/Profilers.java new file mode 100644 index 00000000000..0fb7d9ac1c9 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/Profilers.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.search.internal.ContextIndexSearcher; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** Wrapper around several {@link Profiler}s that makes management easier. */ +public final class Profilers { + + private final ContextIndexSearcher searcher; + private final List profilers; + + /** Sole constructor. This {@link Profilers} instance will initiall wrap one {@link Profiler}. */ + public Profilers(ContextIndexSearcher searcher) { + this.searcher = searcher; + this.profilers = new ArrayList<>(); + addProfiler(); + } + + /** Switch to a new profile. */ + public Profiler addProfiler() { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + profilers.add(profiler); + return profiler; + } + + /** Get the current profiler. */ + public Profiler getCurrent() { + return profilers.get(profilers.size() - 1); + } + + /** Return the list of all created {@link Profiler}s so far. 
*/ + public List getProfilers() { + return Collections.unmodifiableList(profilers); + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index ce8836cd336..08ff849871f 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -52,13 +52,16 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.profile.*; import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.sort.SortParseElement; import org.elasticsearch.search.sort.TrackScoresParseElement; import org.elasticsearch.search.suggest.SuggestPhase; +import java.util.AbstractList; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -124,6 +127,11 @@ public class QueryPhase implements SearchPhase { } suggestPhase.execute(searchContext); aggregationPhase.execute(searchContext); + + if (searchContext.getProfilers() != null) { + List shardResults = Profiler.buildShardResults(searchContext.getProfilers().getProfilers()); + searchContext.queryResult().profileResults(shardResults); + } } private static boolean returnsDocsInOrder(Query query, Sort sort) { @@ -147,6 +155,7 @@ public class QueryPhase implements SearchPhase { QuerySearchResult queryResult = searchContext.queryResult(); queryResult.searchTimedOut(false); + final boolean doProfile = searchContext.getProfilers() != null; final SearchType searchType = searchContext.searchType(); boolean rescore = false; try { @@ -162,9 +171,13 @@ public class QueryPhase implements SearchPhase { Callable topDocsCallable; assert query == searcher.rewrite(query); // already rewritten + if (searchContext.size() == 0) { // no matter what the value of from is final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); collector = totalHitCountCollector; + if (searchContext.getProfilers() != null) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_COUNT, Collections.emptyList()); + } topDocsCallable = new Callable() { @Override public TopDocs call() throws Exception { @@ -219,6 +232,9 @@ public class QueryPhase implements SearchPhase { topDocsCollector = TopScoreDocCollector.create(numDocs, lastEmittedDoc); } collector = topDocsCollector; + if (doProfile) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TOP_HITS, Collections.emptyList()); + } topDocsCallable = new Callable() { @Override public TopDocs call() throws Exception { @@ -254,27 +270,57 @@ public class QueryPhase implements SearchPhase { final boolean terminateAfterSet = searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER; if (terminateAfterSet) { + final Collector child = collector; // throws Lucene.EarlyTerminationException when given count is reached collector = Lucene.wrapCountBasedEarlyTerminatingCollector(collector, searchContext.terminateAfter()); + if (doProfile) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT, + Collections.singletonList((InternalProfileCollector) child)); + } } if 
(searchContext.parsedPostFilter() != null) { + final Collector child = collector; // this will only get applied to the actual search collector and not // to any scoped collectors, also, it will only be applied to the main collector // since that is where the filter should only work final Weight filterWeight = searcher.createNormalizedWeight(searchContext.parsedPostFilter().query(), false); collector = new FilteredCollector(collector, filterWeight); + if (doProfile) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_POST_FILTER, + Collections.singletonList((InternalProfileCollector) child)); + } } // plug in additional collectors, like aggregations - List allCollectors = new ArrayList<>(); - allCollectors.add(collector); - allCollectors.addAll(searchContext.queryCollectors().values()); - collector = MultiCollector.wrap(allCollectors); + final List subCollectors = new ArrayList<>(); + subCollectors.add(collector); + subCollectors.addAll(searchContext.queryCollectors().values()); + collector = MultiCollector.wrap(subCollectors); + if (doProfile && collector instanceof InternalProfileCollector == false) { + // When there is a single collector to wrap, MultiCollector returns it + // directly, so only wrap in the case that there are several sub collectors + final List children = new AbstractList() { + @Override + public InternalProfileCollector get(int index) { + return (InternalProfileCollector) subCollectors.get(index); + } + @Override + public int size() { + return subCollectors.size(); + } + }; + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MULTI, children); + } // apply the minimum score after multi collector so we filter aggs as well if (searchContext.minimumScore() != null) { + final Collector child = collector; collector = new MinimumScoreCollector(collector, searchContext.minimumScore()); + if (doProfile) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MIN_SCORE, + Collections.singletonList((InternalProfileCollector) child)); + } } if (collector.getClass() == TotalHitCountCollector.class) { @@ -319,13 +365,21 @@ public class QueryPhase implements SearchPhase { final boolean timeoutSet = searchContext.timeoutInMillis() != SearchService.NO_TIMEOUT.millis(); if (timeoutSet && collector != null) { // collector might be null if no collection is actually needed + final Collector child = collector; // TODO: change to use our own counter that uses the scheduler in ThreadPool // throws TimeLimitingCollector.TimeExceededException when timeout has reached collector = Lucene.wrapTimeLimitingCollector(collector, searchContext.timeEstimateCounter(), searchContext.timeoutInMillis()); + if (doProfile) { + collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TIMEOUT, + Collections.singletonList((InternalProfileCollector) child)); + } } try { if (collector != null) { + if (doProfile) { + searchContext.getProfilers().getCurrent().setCollector((InternalProfileCollector) collector); + } searcher.search(query, collector); } } catch (TimeLimitingCollector.TimeExceededException e) { @@ -343,7 +397,13 @@ public class QueryPhase implements SearchPhase { queryResult.topDocs(topDocsCallable.call()); + if (searchContext.getProfilers() != null) { + List shardResults = Profiler.buildShardResults(searchContext.getProfilers().getProfilers()); + searchContext.queryResult().profileResults(shardResults); + } + return rescore; + } catch (Throwable e) { throw new 
QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); } diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 7f8d12a9c90..9223eb5a82d 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -20,6 +20,8 @@ package org.elasticsearch.search.query; import org.apache.lucene.search.TopDocs; +import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -53,6 +56,7 @@ public class QuerySearchResult extends QuerySearchResultProvider { private Suggest suggest; private boolean searchTimedOut; private Boolean terminatedEarly = null; + private List profileShardResults; public QuerySearchResult() { @@ -120,6 +124,22 @@ public class QuerySearchResult extends QuerySearchResultProvider { this.aggregations = aggregations; } + /** + * Returns the profiled results for this search, or potentially null if result was empty + * @return The profiled results, or null + */ + public @Nullable List profileResults() { + return profileShardResults; + } + + /** + * Sets the finalized profiling results for this query + * @param shardResults The finalized profile + */ + public void profileResults(List shardResults) { + this.profileShardResults = shardResults; + } + public List pipelineAggregators() { return pipelineAggregators; } @@ -191,6 +211,15 @@ public class QuerySearchResult extends QuerySearchResultProvider { } searchTimedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); + + if (in.getVersion().onOrAfter(Version.V_2_2_0) && in.readBoolean()) { + int profileSize = in.readVInt(); + profileShardResults = new ArrayList<>(profileSize); + for (int i = 0; i < profileSize; i++) { + ProfileShardResult result = new ProfileShardResult(in); + profileShardResults.add(result); + } + } } @Override @@ -229,5 +258,17 @@ public class QuerySearchResult extends QuerySearchResultProvider { } out.writeBoolean(searchTimedOut); out.writeOptionalBoolean(terminatedEarly); + + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + if (profileShardResults == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeVInt(profileShardResults.size()); + for (ProfileShardResult shardResult : profileShardResults) { + shardResult.writeTo(out); + } + } + } } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 90d4437fcea..207b626409b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -57,6 +57,7 @@ import static 
org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; +import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java new file mode 100644 index 00000000000..83f6efaa150 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class ProfileTests extends ESTestCase { + + static Directory dir; + static IndexReader reader; + static ContextIndexSearcher searcher; + + @BeforeClass + public static void before() throws IOException { + dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + final int numDocs = TestUtil.nextInt(random(), 1, 20); + for (int i = 0; i < numDocs; ++i) { + final int numHoles = random().nextInt(5); + for (int j = 0; j < numHoles; ++j) { + w.addDocument(new Document()); + } + Document doc = new Document(); + doc.add(new StringField("foo", "bar", Store.NO)); + w.addDocument(doc); + } 
+ reader = w.getReader(); + w.close(); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); + } + + @AfterClass + public static void after() throws IOException { + IOUtils.close(reader, dir); + dir = null; + reader = null; + searcher = null; + } + + public void testBasic() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.search(query, 1); + List results = profiler.getQueryTree(); + assertEquals(1, results.size()); + Map breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), equalTo(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testNoScoring() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.search(query, 1, Sort.INDEXORDER); // scores are not needed + List results = profiler.getQueryTree(); + assertEquals(1, results.size()); + Map breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), equalTo(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testUseIndexStats() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.count(query); // will use index stats + List results = profiler.getQueryTree(); + assertEquals(0, results.size()); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testApproximations() throws IOException { + Profiler profiler = new Profiler(); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + // disable query caching since we want to test approximations, which won't + // be exposed on a cached entry + ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); + searcher.setProfiler(profiler); + Query query = new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random()); + searcher.count(query); + List results 
= profiler.getQueryTree(); + assertEquals(1, results.size()); + Map breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), greaterThan(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + + } + + public void testCollector() throws IOException { + TotalHitCountCollector collector = new TotalHitCountCollector(); + ProfileCollector profileCollector = new ProfileCollector(collector); + assertEquals(0, profileCollector.getTime()); + final LeafCollector leafCollector = profileCollector.getLeafCollector(reader.leaves().get(0)); + assertThat(profileCollector.getTime(), greaterThan(0L)); + long time = profileCollector.getTime(); + leafCollector.setScorer(Lucene.illegalScorer("dummy scorer")); + assertThat(profileCollector.getTime(), greaterThan(time)); + time = profileCollector.getTime(); + leafCollector.collect(0); + assertThat(profileCollector.getTime(), greaterThan(time)); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java new file mode 100644 index 00000000000..bb33364a751 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -0,0 +1,596 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.action.search.*; +import org.elasticsearch.search.SearchHit; +import org.apache.lucene.util.English; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.index.query.*; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.search.profile.RandomQueryGenerator.randomQueryBuilder; +import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual; +import static org.hamcrest.Matchers.*; + + +public class QueryProfilerIT extends ESIntegTestCase { + + /** + * This test simply checks to make sure nothing crashes. 
Test indexes 100-150 documents, + * constructs 20-100 random queries and tries to profile them + */ + public void testProfileQuery() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + List stringFields = Arrays.asList("field1"); + List numericFields = Arrays.asList("field2"); + + indexRandom(true, docs); + + refresh(); + int iters = between(20, 100); + for (int i = 0; i < iters; i++) { + QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + for (Map.Entry> shard : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shard.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + + } + } + + /** + * This test generates 1-10 random queries and executes a profiled and non-profiled + * search for each query. It then does some basic sanity checking of score and hits + * to make sure the profiling doesn't interfere with the hits being returned + */ + public void testProfileMatchesRegular() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + List stringFields = Arrays.asList("field1"); + List numericFields = Arrays.asList("field2"); + + indexRandom(true, docs); + + refresh(); + int iters = between(1, 10); + for (int i = 0; i < iters; i++) { + QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); + logger.info(q.toString()); + + + SearchRequestBuilder vanilla = client().prepareSearch("test") + .setQuery(q) + .setProfile(false) + .addSort("_score", SortOrder.DESC) + .addSort("_uid", SortOrder.ASC) + .setPreference("_primary") + .setSearchType(SearchType.QUERY_THEN_FETCH); + + SearchRequestBuilder profile = client().prepareSearch("test") + .setQuery(q) + .setProfile(true) + .addSort("_score", SortOrder.DESC) + .addSort("_uid", SortOrder.ASC) + .setPreference("_primary") + .setSearchType(SearchType.QUERY_THEN_FETCH); + + MultiSearchResponse.Item[] responses = client().prepareMultiSearch() + .add(vanilla) + .add(profile) + .execute().actionGet().getResponses(); + + SearchResponse vanillaResponse = responses[0].getResponse(); + SearchResponse profileResponse = responses[1].getResponse(); + + float vanillaMaxScore = vanillaResponse.getHits().getMaxScore(); + float profileMaxScore = profileResponse.getHits().getMaxScore(); + if (Float.isNaN(vanillaMaxScore)) { + assertTrue("Vanilla maxScore is NaN but Profile is not [" + 
profileMaxScore + "]", + Float.isNaN(profileMaxScore)); + } else { + assertTrue("Profile maxScore of [" + profileMaxScore + "] is not close to Vanilla maxScore [" + vanillaMaxScore + "]", + nearlyEqual(vanillaMaxScore, profileMaxScore, 0.001)); + } + + assertThat("Profile totalHits of [" + profileResponse.getHits().totalHits() + "] is not close to Vanilla totalHits [" + vanillaResponse.getHits().totalHits() + "]", + vanillaResponse.getHits().getTotalHits(), equalTo(profileResponse.getHits().getTotalHits())); + + SearchHit[] vanillaHits = vanillaResponse.getHits().getHits(); + SearchHit[] profileHits = profileResponse.getHits().getHits(); + + for (int j = 0; j < vanillaHits.length; j++) { + assertThat("Profile hit #" + j + " has a different ID from Vanilla", + vanillaHits[j].getId(), equalTo(profileHits[j].getId())); + } + + } + } + + /** + * This test verifies that the output is reasonable for a simple, non-nested query + */ + public void testSimpleMatch() throws Exception { + createIndex("test"); + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + ensureGreen(); + + QueryBuilder q = QueryBuilders.matchQuery("field1", "one"); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + Map> p = resp.getProfileResults(); + assertNotNull(p); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertEquals(result.getQueryName(), "TermQuery"); + assertEquals(result.getLuceneDescription(), "field1:one"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * This test verifies that the output is reasonable for a nested query + */ + public void testBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")).must(QueryBuilders.matchQuery("field1", "two")); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + Map> p = resp.getProfileResults(); + assertNotNull(p); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertEquals(result.getQueryName(), "BooleanQuery"); + assertEquals(result.getLuceneDescription(), "+field1:one +field1:two"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + 
assertEquals(result.getProfiledChildren().size(), 2); + + // Check the children + List children = result.getProfiledChildren(); + assertEquals(children.size(), 2); + + ProfileResult childProfile = children.get(0); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:one"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + assertEquals(childProfile.getProfiledChildren().size(), 0); + + childProfile = children.get(1); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:two"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + + + } + + /** + * Tests a boolean query with no children clauses + */ + public void testEmptyBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boolQuery(); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * Tests a series of three nested boolean queries with a single "leaf" match query. 
+ * The rewrite process will "collapse" this down to a single bool, so this tests to make sure + * nothing catastrophic happens during that fairly substantial rewrite + */ + public void testCollapsingBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")))); + + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testBoosting() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boostingQuery(QueryBuilders.matchQuery("field1", "one"), QueryBuilders.matchQuery("field1", "two")) + .boost(randomFloat()) + .negativeBoost(randomFloat()); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testDisMaxRange() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = 
QueryBuilders.disMaxQuery() + .boost(0.33703882f) + .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true)); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testRange() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testPhrase() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i) + " " + English.intToEnglish(i+1), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.matchPhraseQuery("field1", "one two"); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setIndices("test") + .setTypes("type1") + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + if (resp.getShardFailures().length > 0) { + for (ShardSearchFailure f : resp.getShardFailures()) { + logger.error(f.toString()); + } + fail(); + } + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + + for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : 
searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * This test makes sure no profile results are returned when profiling is disabled + */ + public void testNoProfile() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + refresh(); + QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(false).execute().actionGet(); + assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); + } + +} + diff --git a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java new file mode 100644 index 00000000000..fb8cd40ce52 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java @@ -0,0 +1,266 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.util.English; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.query.*; + +import java.util.ArrayList; +import java.util.List; + +import static com.carrotsearch.randomizedtesting.RandomizedTest.*; +import static org.junit.Assert.assertTrue; + + +public class RandomQueryGenerator { + public static QueryBuilder randomQueryBuilder(List stringFields, List numericFields, int numDocs, int depth) { + assertTrue("Must supply at least one string field", stringFields.size() > 0); + assertTrue("Must supply at least one numeric field", numericFields.size() > 0); + + // If depth is exhausted, or 50% of the time return a terminal + // Helps limit ridiculously large compound queries + if (depth == 0 || randomBoolean()) { + return randomTerminalQuery(stringFields, numericFields, numDocs); + } + + switch (randomIntBetween(0,5)) { + case 0: + return randomTerminalQuery(stringFields, numericFields, numDocs); + case 1: + return QueryBuilders.boolQuery().must(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)) + .filter(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)); + case 2: + return randomBoolQuery(stringFields, numericFields, numDocs, depth); + case 3: + // disabled for now because of https://issues.apache.org/jira/browse/LUCENE-6781 + //return randomBoostingQuery(stringFields, numericFields, numDocs, depth); + case 4: + return randomConstantScoreQuery(stringFields, numericFields, numDocs, depth); + case 5: + return randomDisMaxQuery(stringFields, numericFields, numDocs, depth); + default: + return randomTerminalQuery(stringFields, numericFields, numDocs); + } + } + + private static QueryBuilder randomTerminalQuery(List stringFields, List numericFields, int numDocs) { + switch (randomIntBetween(0,6)) { + case 0: + return randomTermQuery(stringFields, numDocs); + case 1: + return randomTermsQuery(stringFields, numDocs); + case 2: + return randomRangeQuery(numericFields, numDocs); + case 3: + return QueryBuilders.matchAllQuery(); + case 4: + return randomCommonTermsQuery(stringFields, numDocs); + case 5: + return randomFuzzyQuery(stringFields); + case 6: + return randomIDsQuery(); + default: + return randomTermQuery(stringFields, numDocs); + } + } + + private static String randomQueryString(int max) { + StringBuilder qsBuilder = new StringBuilder(); + + for (int i = 0; i < max; i++) { + qsBuilder.append(English.intToEnglish(randomInt(max))); + qsBuilder.append(" "); + } + + return qsBuilder.toString().trim(); + } + + private static String randomField(List fields) { + return fields.get(randomInt(fields.size() - 1)); + } + + + + private static QueryBuilder randomTermQuery(List fields, int numDocs) { + return QueryBuilders.termQuery(randomField(fields), randomQueryString(1)); + } + + private static QueryBuilder randomTermsQuery(List fields, int numDocs) { + int numTerms = randomInt(numDocs); + ArrayList terms = new ArrayList<>(numTerms); + + for (int i = 0; i < numTerms; i++) { + terms.add(randomQueryString(1)); + } + + return QueryBuilders.termsQuery(randomField(fields), terms); + } + + private static QueryBuilder randomRangeQuery(List fields, int numDocs) { + QueryBuilder q = QueryBuilders.rangeQuery(randomField(fields)); + + if (randomBoolean()) { + ((RangeQueryBuilder)q).from(randomIntBetween(0, numDocs / 2 - 1)); + } + if (randomBoolean()) { + ((RangeQueryBuilder)q).to(randomIntBetween(numDocs / 2, numDocs)); + } + + return q; + } + + private static QueryBuilder 
randomBoolQuery(List stringFields, List numericFields, int numDocs, int depth) { + QueryBuilder q = QueryBuilders.boolQuery(); + int numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).must(randomQueryBuilder(stringFields, numericFields,numDocs, depth -1)); + } + + numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).should(randomQueryBuilder(stringFields, numericFields,numDocs, depth -1)); + } + + numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).mustNot(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)); + } + + return q; + } + + private static QueryBuilder randomBoostingQuery(List stringFields, List numericFields, int numDocs, int depth) { + return QueryBuilders.boostingQuery( + randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1), + randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)) + .boost(randomFloat()) + .negativeBoost(randomFloat()); + } + + private static QueryBuilder randomConstantScoreQuery(List stringFields, List numericFields, int numDocs, int depth) { + return QueryBuilders.constantScoreQuery(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)); + } + + private static QueryBuilder randomCommonTermsQuery(List fields, int numDocs) { + int numTerms = randomInt(numDocs); + + QueryBuilder q = QueryBuilders.commonTermsQuery(randomField(fields), randomQueryString(numTerms)); + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).cutoffFrequency(randomFloat()); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).highFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) + .highFreqOperator(randomBoolean() ? Operator.AND : Operator.OR); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).lowFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) + .lowFreqOperator(randomBoolean() ? 
Operator.AND : Operator.OR); + } + + return q; + } + + private static QueryBuilder randomFuzzyQuery(List fields) { + + QueryBuilder q = QueryBuilders.fuzzyQuery(randomField(fields), randomQueryString(1)); + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + switch (randomIntBetween(0, 4)) { + case 0: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.AUTO); + break; + case 1: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.ONE); + break; + case 2: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.TWO); + break; + case 3: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.ZERO); + break; + case 4: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.fromEdits(randomIntBetween(0,2))); + break; + default: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.AUTO); + break; + } + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).maxExpansions(Math.abs(randomInt())); + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).prefixLength(Math.abs(randomInt())); + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).transpositions(randomBoolean()); + } + + return q; + } + + private static QueryBuilder randomDisMaxQuery(List stringFields, List numericFields, int numDocs, int depth) { + QueryBuilder q = QueryBuilders.disMaxQuery(); + + int numClauses = randomIntBetween(1, 10); + for (int i = 0; i < numClauses; i++) { + ((DisMaxQueryBuilder)q).add(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)); + } + + if (randomBoolean()) { + ((DisMaxQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + ((DisMaxQueryBuilder)q).tieBreaker(randomFloat()); + } + + return q; + } + + private static QueryBuilder randomIDsQuery() { + QueryBuilder q = QueryBuilders.idsQuery(); + + int numIDs = randomInt(100); + for (int i = 0; i < numIDs; i++) { + ((IdsQueryBuilder)q).addIds(String.valueOf(randomInt())); + } + + if (randomBoolean()) { + ((IdsQueryBuilder)q).boost(randomFloat()); + } + + return q; + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java b/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java new file mode 100644 index 00000000000..de275eaffca --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.test.hamcrest;
+
+
+public class DoubleMatcher {
+
+    /**
+     * Better floating point comparisons courtesy of https://github.com/brazzy/floating-point-gui.de
+     *
+     * Snippet adapted to use doubles instead of floats
+     */
+    public static boolean nearlyEqual(double a, double b, double epsilon) {
+        final double absA = Math.abs(a);
+        final double absB = Math.abs(b);
+        final double diff = Math.abs(a - b);
+
+        if (a == b) { // shortcut, handles infinities
+            return true;
+        } else if (a == 0 || b == 0 || diff < Double.MIN_NORMAL) {
+            // a or b is zero or both are extremely close to it
+            // relative error is less meaningful here
+            return diff < (epsilon * Double.MIN_NORMAL);
+        } else { // use relative error
+            return diff / Math.min((absA + absB), Double.MAX_VALUE) < epsilon;
+        }
+    }
+}
diff --git a/docs/reference/search.asciidoc b/docs/reference/search.asciidoc
index 2d8a1f8bc9a..da7d2e5ee4b 100644
--- a/docs/reference/search.asciidoc
+++ b/docs/reference/search.asciidoc
@@ -95,6 +95,8 @@ include::search/validate.asciidoc[]
 
 include::search/explain.asciidoc[]
 
+include::search/profile.asciidoc[]
+
 include::search/percolate.asciidoc[]
 
 include::search/field-stats.asciidoc[]
diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc
new file mode 100644
index 00000000000..3d701ae5198
--- /dev/null
+++ b/docs/reference/search/profile.asciidoc
@@ -0,0 +1,601 @@
+[[search-profile]]
+== Profile API
+
+coming[2.2.0]
+
+experimental[]
+
+The Profile API provides detailed timing information about the execution of individual components
+in a query. It gives the user insight into how queries are executed at a low level so that
+the user can understand why certain queries are slow, and take steps to improve their slow queries.
+
+The output from the Profile API is *very* verbose, especially for complicated queries executed across
+many shards. Pretty-printing the response is recommended to help understand the output.
+
+[NOTE]
+---------------------
+The details provided by the Profile API directly expose Lucene class names and concepts, which means
+that complete interpretation of the results requires fairly advanced knowledge of Lucene. This
+page attempts to give a crash-course in how Lucene executes queries so that you can use the Profile API to successfully
+diagnose and debug queries, but it is only an overview. For complete understanding, please refer
+to Lucene's documentation and, in places, the code.
+
+With that said, a complete understanding is often not required to fix a slow query. It is usually
+sufficient to see that a particular component of a query is slow, and not necessarily understand why
+the `advance` phase of that query is the cause, for example.
+---------------------
+
+[float]
+=== Usage
+
+Any `_search` request can be profiled by adding a top-level `profile` parameter:
+
+[source,js]
+--------------------------------------------------
+curl -XGET 'localhost:9200/_search' -d '{
+  "profile": true,<1>
+  "query" : {
+    "match" : { "message" : "search test" }
+  }
+}'
+--------------------------------------------------
+<1> Setting the top-level `profile` parameter to `true` will enable profiling
+for the search
+
+This will yield the following result:
+
+[source,js]
+--------------------------------------------------
+{
+   "took": 25,
+   "timed_out": false,
+   "_shards": {
+      "total": 1,
+      "successful": 1,
+      "failed": 0
+   },
+   "hits": {
+      "total": 1,
+      "max_score": 1,
+      "hits": [ ... ] <1>
+   },
+   "profile": {
+      "shards": [
+         {
+            "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]",
+            "searches": [
+               {
+                  "query": [
+                     {
+                        "query_type": "BooleanQuery",
+                        "lucene": "message:search message:test",
+                        "time": "15.52889800ms",
+                        "breakdown": {
+                           "score": 0,
+                           "next_doc": 24495,
+                           "match": 0,
+                           "create_weight": 8488388,
+                           "build_scorer": 7016015,
+                           "advance": 0
+                        },
+                        "children": [
+                           {
+                              "query_type": "TermQuery",
+                              "lucene": "message:search",
+                              "time": "4.938855000ms",
+                              "breakdown": {
+                                 "score": 0,
+                                 "next_doc": 18332,
+                                 "match": 0,
+                                 "create_weight": 2945570,
+                                 "build_scorer": 1974953,
+                                 "advance": 0
+                              }
+                           },
+                           {
+                              "query_type": "TermQuery",
+                              "lucene": "message:test",
+                              "time": "0.5016660000ms",
+                              "breakdown": {
+                                 "score": 0,
+                                 "next_doc": 0,
+                                 "match": 0,
+                                 "create_weight": 170534,
+                                 "build_scorer": 331132,
+                                 "advance": 0
+                              }
+                           }
+                        ]
+                     }
+                  ],
+                  "rewrite_time": 185002,
+                  "collector": [
+                     {
+                        "name": "SimpleTopScoreDocCollector",
+                        "reason": "search_top_hits",
+                        "time": "2.206529000ms"
+                     }
+                  ]
+               }
+            ]
+         }
+      ]
+   }
+}
+--------------------------------------------------
+<1> Search results are returned, but were omitted here for brevity
+
+Even for a simple query, the response is relatively complicated. Let's break it down piece-by-piece before moving
+to more complex examples.
+
+First, the overall structure of the profile response is as follows:
+
+[source,js]
+--------------------------------------------------
+{
+   "profile": {
+      "shards": [
+         {
+            "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]",  <1>
+            "searches": [
+               {
+                  "query": [...],             <2>
+                  "rewrite_time": 185002,     <3>
+                  "collector": [...]          <4>
+               }
+            ]
+         }
+      ]
+   }
+}
+--------------------------------------------------
+<1> A profile is returned for each shard that participated in the response, and is identified
+by a unique ID
+<2> Each profile contains a section which holds details about the query execution
+<3> Each profile has a single time representing the cumulative rewrite time
+<4> Each profile also contains a section about the Lucene Collectors which run the search
+
+Because a search request may be executed against one or more shards in an index, and a search may cover
+one or more indices, the top level element in the profile response is an array of `shard` objects.
+Each shard object lists its `id`, which uniquely identifies the shard. The ID's format is
+`[nodeID][indexName][shardID]`.
+
+The profile itself may consist of one or more "searches", where a search is a query executed against the underlying
+Lucene index. Most Search Requests submitted by the user will only execute a single `search` against the Lucene index.
+But occasionally multiple searches will be executed, such as including a global aggregation (which needs to execute
+a secondary "match_all" query for the global context).
+
+Inside each `search` object there will be two arrays of profiled information:
+a `query` array and a `collector` array. In the future, more sections may be added, such as `suggest`, `highlight`,
+`aggregations`, etc.
+
+There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds).
+
+=== `query` Section
+
+The `query` section contains detailed timing of the query tree executed by Lucene on a particular shard.
+The overall structure of this query tree will resemble your original Elasticsearch query, but may be slightly
+(or sometimes very) different. It will also use similar but not always identical naming.
+Using our previous `term` query example, let's analyze the `query` section:
+
+[source,js]
+--------------------------------------------------
+"query": [
+   {
+      "query_type": "BooleanQuery",
+      "lucene": "message:search message:test",
+      "time": "15.52889800ms",
+      "breakdown": {...},
+      "children": [
+         {
+            "query_type": "TermQuery",
+            "lucene": "message:search",
+            "time": "4.938855000ms",
+            "breakdown": {...}
+         },
+         {
+            "query_type": "TermQuery",
+            "lucene": "message:test",
+            "time": "0.5016660000ms",
+            "breakdown": {...}
+         }
+      ]
+   }
+]
+--------------------------------------------------
+<1> The breakdown timings are omitted for simplicity
+
+Based on the profile structure, we can see that our `match` query was rewritten by Lucene into a BooleanQuery with two
+clauses (both holding a TermQuery). The `"query_type"` field displays the Lucene class name, and often aligns with
+the equivalent name in Elasticsearch. The `"lucene"` field displays the Lucene explanation text for the query, and
+is made available to help differentiate between parts of your query (e.g. both `"message:search"` and `"message:test"`
+are TermQuery's and would appear identical otherwise).
+
+The `"time"` field shows that this query took ~15ms for the entire BooleanQuery to execute. The recorded time is inclusive
+of all children.
+
+The `"breakdown"` field will give detailed stats about how the time was spent; we'll look at
+that in a moment. Finally, the `"children"` array lists any sub-queries that may be present. Because we searched for two
+values ("search test"), our BooleanQuery holds two children TermQueries. They have identical information (query_type, time,
+breakdown, etc.). Children are allowed to have their own children.
+
+==== Timing Breakdown
+
+The `breakdown` component lists detailed timing statistics about low-level Lucene execution:
+
+[source,js]
+--------------------------------------------------
+"breakdown": {
+   "score": 0,
+   "next_doc": 24495,
+   "match": 0,
+   "create_weight": 8488388,
+   "build_scorer": 7016015,
+   "advance": 0
+}
+--------------------------------------------------
+
+Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall
+`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is
+actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time,
+the breakdown is inclusive of all children times.
+
+The meaning of the stats is as follows:
+
+[float]
+=== All parameters:
+
+[horizontal]
+
+`create_weight`::
+
+    A Query in Lucene must be capable of reuse across multiple IndexSearchers (think of it as the engine that
+    executes a search against a specific Lucene Index). This puts Lucene in a tricky spot, since many queries
+    need to accumulate temporary state/statistics associated with the index it is being used against, but the
+    Query contract mandates that it must be immutable.
+
+    To get around this, Lucene asks each query to generate a Weight object which acts as a temporary context
+    object to hold state associated with this particular (IndexSearcher, Query) tuple. The `weight` metric
+    shows how long this process takes
+
+`build_scorer`::
+
+    This parameter shows how long it takes to build a Scorer for the query. A Scorer is the mechanism that
+    iterates over matching documents and generates a score per-document (e.g. how well does "foo" match the document?).
+    Note, this records the time required to generate the Scorer object, not to actually score the documents. Some
+    queries have faster or slower initialization of the Scorer, depending on optimizations, complexity, etc.
+
+    This may also show timing associated with caching, if enabled and/or applicable for the query
+
+`next_doc`::
+
+    The Lucene method `next_doc` returns the Doc ID of the next document matching the query. This statistic shows
+    the time it takes to determine which document is the next match, a process that varies considerably depending
+    on the nature of the query. Next_doc is a specialized form of advance() which is more convenient for many
+    queries in Lucene. It is equivalent to advance(docId() + 1)
+
+`advance`::
+
+    `advance` is the "lower level" version of next_doc: it serves the same purpose of finding the next matching
+    doc, but requires the calling query to perform extra tasks such as identifying and moving past skips, etc.
+    However, not all queries can use next_doc, so `advance` is also timed for those queries.
+
+    Conjunctions (e.g. `must` clauses in a boolean) are typical consumers of `advance`
+
+`matches`::
+
+    Some queries, such as phrase queries, match documents using a "Two Phase" process. First, the document is
+    "approximately" matched, and if it matches approximately, it is checked a second time with a more rigorous
+    (and expensive) process. The second phase verification is what the `matches` statistic measures.
+
+    For example, a phrase query first checks a document approximately by ensuring all terms in the phrase are
+    present in the doc. If all the terms are present, it then executes the second phase verification to ensure
+    the terms are in-order to form the phrase, which is relatively more expensive than just checking for presence
+    of the terms.
+
+    Because this two-phase process is only used by a handful of queries, the `matches` statistic will often be zero
+
+`score`::
+
+    This records the time taken to score a particular document via its Scorer
+
+=== `collectors` Section
+
+The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector"
+which is responsible for coordinating the traversal, scoring and collection of matching documents. Collectors
+are also how a single query can record aggregation results, execute unscoped "global" queries, execute post-query
+filters, etc.
+
+Looking at the previous example:
+
+[source,js]
+--------------------------------------------------
+"collector": [
+   {
+      "name": "SimpleTopScoreDocCollector",
+      "reason": "search_top_hits",
+      "time": "2.206529000ms"
+   }
+]
+--------------------------------------------------
+
+We see a single collector named `SimpleTopScoreDocCollector`. This is the default "scoring and sorting" Collector
+used by Elasticsearch. The `"reason"` field attempts to give a plain English description of the class name. The
+`"time"` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists
+all sub-collectors.
+
+It should be noted that Collector times are **independent** from the Query times. They are calculated, combined
+and normalized independently! Due to the nature of Lucene's execution, it is impossible to "merge" the times
+from the Collectors into the Query section, so they are displayed in separate portions.
+
+For reference, the various collector reasons are:
+
+[horizontal]
+`search_sorted`::
+
+    A collector that scores and sorts documents.
+    This is the most common collector and will be seen in most
+    simple searches
+
+`search_count`::
+
+    A collector that only counts the number of documents that match the query, but does not fetch the source.
+    This is seen when `size: 0` or `search_type=count` is specified
+
+`search_terminate_after_count`::
+
+    A collector that terminates search execution after `n` matching documents have been found. This is seen
+    when the `terminate_after_count` query parameter has been specified
+
+`search_min_score`::
+
+    A collector that only returns matching documents that have a score greater than `n`. This is seen when
+    the top-level parameter `min_score` has been specified.
+
+`search_multi`::
+
+    A collector that wraps several other collectors. This is seen when combinations of search, aggregations,
+    global aggs and post_filters are combined in a single search.
+
+`search_timeout`::
+
+    A collector that halts execution after a specified period of time. This is seen when a `timeout` top-level
+    parameter has been specified.
+
+`aggregation`::
+
+    A collector that Elasticsearch uses to run aggregations against the query scope. A single `aggregation`
+    collector is used to collect documents for *all* aggregations, so you will see a list of aggregations
+    in the name rather than a separate collector per aggregation.
+
+`global_aggregation`::
+
+    A collector that executes an aggregation against the global query scope, rather than the specified query.
+    Because the global scope is necessarily different from the executed query, it must execute its own
+    match_all query (which you will see added to the Query section) to collect your entire dataset
+
+
+
+=== `rewrite` Section
+
+    All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or
+    more times, and the process continues until the query stops changing. This process allows Lucene to perform
+    optimizations, such as removing redundant clauses, replacing one query for a more efficient execution path,
+    etc. For example a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans
+    are unnecessary in this case.
+
+    The rewriting process is complex and difficult to display, since queries can change drastically. Rather than
+    showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This
+    value is cumulative and contains the total time for all queries being rewritten.
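+
+As a concrete illustration of this collapsing, consider profiling a redundantly nested `bool` query like the one
+below (a hypothetical example; the index and field are only placeholders). The profiled query tree may show just
+the inner TermQuery, with the time spent collapsing the Booleans reflected in `rewrite_time`:
+
+[source,js]
+--------------------------------------------------
+GET /test/_search
+{
+  "profile": true,
+  "query": {
+    "bool": {
+      "must": [
+        { "bool": { "must": [ { "term": { "message": "search" } } ] } }
+      ]
+    }
+  }
+}
+--------------------------------------------------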
+
+=== A more complex example
+
+To demonstrate a slightly more complex query and the associated results, we can profile the following query:
+
+[source,js]
+--------------------------------------------------
+GET /test/_search
+{
+  "profile": true,
+  "query": {
+    "term": {
+      "message": {
+        "value": "search"
+      }
+    }
+  },
+  "aggs": {
+    "non_global_term": {
+      "terms": {
+        "field": "agg"
+      },
+      "aggs": {
+        "second_term": {
+          "terms": {
+            "field": "sub_agg"
+          }
+        }
+      }
+    },
+    "another_agg": {
+      "cardinality": {
+        "field": "aggB"
+      }
+    },
+    "global_agg": {
+      "global": {},
+      "aggs": {
+        "my_agg2": {
+          "terms": {
+            "field": "globalAgg"
+          }
+        }
+      }
+    }
+  },
+  "post_filter": {
+    "term": {
+      "my_field": "foo"
+    }
+  }
+}
+--------------------------------------------------
+
+This example has:
+
+- A query
+- A scoped aggregation
+- A global aggregation
+- A post_filter
+
+And the response:
+
+
+[source,js]
+--------------------------------------------------
+{
+   "profile": {
+      "shards": [
+         {
+            "id": "[P6-vulHtQRWuD4YnubWb7A][test][0]",
+            "searches": [
+               {
+                  "query": [
+                     {
+                        "query_type": "TermQuery",
+                        "lucene": "my_field:foo",
+                        "time": "0.4094560000ms",
+                        "breakdown": {
+                           "score": 0,
+                           "next_doc": 0,
+                           "match": 0,
+                           "create_weight": 31584,
+                           "build_scorer": 377872,
+                           "advance": 0
+                        }
+                     },
+                     {
+                        "query_type": "TermQuery",
+                        "lucene": "message:search",
+                        "time": "0.3037020000ms",
+                        "breakdown": {
+                           "score": 0,
+                           "next_doc": 5936,
+                           "match": 0,
+                           "create_weight": 185215,
+                           "build_scorer": 112551,
+                           "advance": 0
+                        }
+                     }
+                  ],
+                  "rewrite_time": 7208,
+                  "collector": [
+                     {
+                        "name": "MultiCollector",
+                        "reason": "search_multi",
+                        "time": "1.378943000ms",
+                        "children": [
+                           {
+                              "name": "FilteredCollector",
+                              "reason": "search_post_filter",
+                              "time": "0.4036590000ms",
+                              "children": [
+                                 {
+                                    "name": "SimpleTopScoreDocCollector",
+                                    "reason": "search_top_hits",
+                                    "time": "0.006391000000ms"
+                                 }
+                              ]
+                           },
+                           {
+                              "name": "BucketCollector: [[non_global_term, another_agg]]",
+                              "reason": "aggregation",
+                              "time": "0.9546020000ms"
+                           }
+                        ]
+                     }
+                  ]
+               },
+               {
+                  "query": [
+                     {
+                        "query_type": "MatchAllDocsQuery",
+                        "lucene": "*:*",
+                        "time": "0.04829300000ms",
+                        "breakdown": {
+                           "score": 0,
+                           "next_doc": 3672,
+                           "match": 0,
+                           "create_weight": 6311,
+                           "build_scorer": 38310,
+                           "advance": 0
+                        }
+                     }
+                  ],
+                  "rewrite_time": 1067,
+                  "collector": [
+                     {
+                        "name": "GlobalAggregator: [global_agg]",
+                        "reason": "aggregation_global",
+                        "time": "0.1226310000ms"
+                     }
+                  ]
+               }
+            ]
+         }
+      ]
+   }
+}
+--------------------------------------------------
+
+As you can see, the output is significantly more verbose than before. All the major portions of the query are
+represented:
+
+1. The first `TermQuery` (message:search) represents the main `term` query
+2. The second `TermQuery` (my_field:foo) represents the `post_filter` query
+3. There is a `MatchAllDocsQuery` (*:*) query which is being executed as a second, distinct search. This was
+not part of the query specified by the user, but is auto-generated by the global aggregation to provide a global query scope
+
+The Collector tree is fairly straightforward, showing how a single MultiCollector wraps a FilteredCollector
+to execute the post_filter (which in turn wraps the normal scoring SimpleTopScoreDocCollector), and a BucketCollector
+to run all scoped aggregations. In the MatchAll search, there is a single GlobalAggregator to run the global aggregation.
+
+=== Performance Notes
+
+Like any profiler, the Profile API introduces a non-negligible overhead to query execution.
+The act of instrumenting low-level method calls such as `advance` and `next_doc` can be fairly expensive, since
+these methods are called in tight loops. Therefore, profiling should not be enabled in production settings by
+default, and should not be compared against non-profiled query times. Profiling is just a diagnostic tool.
+
+There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This
+could cause some queries to report larger relative times than their non-profiled counterparts, but in general should
+not have a drastic effect compared to other components in the profiled query.
+
+=== Limitations
+
+- Profiling statistics are currently not available for suggestions, highlighting, or `dfs_query_then_fetch`
+- Detailed breakdown for aggregations is not currently available past the high-level overview provided
+by the Collectors
+- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were
+never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed
+diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures or
+other bugs, please report them!
+
+=== Understanding MultiTermQuery output
+
+A special note needs to be made about the `MultiTermQuery` class of queries. This includes wildcards, regex and fuzzy
+queries. These queries emit very verbose responses, and are not overly structured.
+
+Essentially, these queries rewrite themselves on a per-segment basis. If you imagine the wildcard query `b*`, it technically
+can match any token that begins with the letter "b". It would be impossible to enumerate all possible combinations,
+so Lucene rewrites the query in context of the segment being evaluated. E.g. one segment may contain the tokens
+`[bar, baz]`, so the query rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may only have the
+token `[bakery]`, so the query rewrites to a single TermQuery for "bakery" (a profiled request illustrating this is
+shown below).
+
+Due to this dynamic, per-segment rewriting, the clean tree structure becomes distorted and no longer follows a clean
+"lineage" showing how one query rewrites into the next. At present, all we can do is apologize, and suggest you
+collapse the details for that query's children if it is too confusing. Luckily, all the timing statistics are correct,
+just not the physical layout in the response, so it is sufficient to just analyze the top-level MultiTermQuery and
+ignore its children if you find the details too tricky to interpret.
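+
+To make this concrete, a wildcard request like the following (the index and field names are only examples) is the
+kind of query that triggers this per-segment rewriting when profiled:
+
+[source,js]
+--------------------------------------------------
+GET /test/_search
+{
+  "profile": true,
+  "query": {
+    "wildcard": {
+      "message": {
+        "value": "b*"
+      }
+    }
+  }
+}
+--------------------------------------------------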
+
+Hopefully this will be fixed in future iterations, but it is a tricky problem to solve and is still in progress :)
\ No newline at end of file
diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
index 468b1877250..51fb0c905f4 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
+++ b/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
@@ -35,7 +35,6 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
-import org.elasticsearch.index.cache.query.QueryCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -60,6 +59,8 @@ import org.elasticsearch.search.internal.ScrollContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.search.profile.Profiler;
+import org.elasticsearch.search.profile.Profilers;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
@@ -293,7 +294,7 @@
     }
 
     public void setSearcher(Engine.Searcher searcher) {
-        this.searcher = new ContextIndexSearcher(this, searcher);
+        this.searcher = new ContextIndexSearcher(searcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
     }
 
     @Override
@@ -660,8 +661,11 @@ public void copyContextAndHeadersFrom(HasContextAndHeaders other) {}
 
     @Override
-    public Map, Collector> queryCollectors() {return queryCollectors;}
+    public Profilers getProfilers() {
+        return null; // no profiling
+    }
 
     @Override
-    public QueryCache getQueryCache() { return indexService.cache().query();}
+    public Map, Collector> queryCollectors() {return queryCollectors;}
+
 }

From b62f8f11d4d5dde7161b4f6da9c87563e128c0c2 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Thu, 17 Dec 2015 16:57:41 -0500
Subject: [PATCH 116/322] Address possible race condition in test

This commit addresses a potential race condition in
ClusterServiceIT#testClusterStateBatchedUpdates. The potential race
condition is that the main test thread could be released to execute the
final test assertions before the cluster state publication callbacks had
completed, thereby causing a situation where the test assertions could
be executed before the final test state had been realized.
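
A minimal sketch of the coordination this change introduces (simplified
from the actual test code below; the names match the test):

    // one permit per executor; a permit is taken when an executor
    // produces a changed cluster state and is returned only after the
    // corresponding publication callback has run
    final Semaphore semaphore = new Semaphore(numberOfExecutors);

    // in TaskExecutor#execute, when a changed cluster state is built:
    semaphore.acquire();

    // in TaskExecutor#clusterStatePublished, after publication:
    semaphore.release();

    // in the main test thread, before the final assertions; this blocks
    // until every publication callback has completed:
    semaphore.acquire(numberOfExecutors);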
---
 .../org/elasticsearch/cluster/ClusterServiceIT.java | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
index a7417b4362f..5a59341ef35 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
@@ -54,6 +54,7 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -762,6 +763,11 @@ public class ClusterServiceIT extends ESIntegTestCase {
             }
         }
 
+        int numberOfThreads = randomIntBetween(2, 8);
+        int tasksSubmittedPerThread = randomIntBetween(1, 1024);
+        int numberOfExecutors = Math.max(1, numberOfThreads / 4);
+        final Semaphore semaphore = new Semaphore(numberOfExecutors);
+
         class TaskExecutor implements ClusterStateTaskExecutor {
             private AtomicInteger counter = new AtomicInteger();
             private AtomicInteger batches = new AtomicInteger();
@@ -775,6 +781,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
                 if (randomBoolean()) {
                     maybeUpdatedClusterState = ClusterState.builder(currentState).build();
                     batches.incrementAndGet();
+                    semaphore.acquire();
                 }
                 return BatchResult.builder().successes(tasks).build(maybeUpdatedClusterState);
             }
@@ -787,10 +794,9 @@ public class ClusterServiceIT extends ESIntegTestCase {
             @Override
             public void clusterStatePublished(ClusterState newClusterState) {
                 published.incrementAndGet();
+                semaphore.release();
             }
         }
-        int numberOfThreads = randomIntBetween(2, 8);
-        int tasksSubmittedPerThread = randomIntBetween(1, 1024);
 
         ConcurrentMap counters = new ConcurrentHashMap<>();
         CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread);
@@ -807,7 +813,6 @@ public class ClusterServiceIT extends ESIntegTestCase {
             }
         };
 
-        int numberOfExecutors = Math.max(1, numberOfThreads / 4);
         List executors = new ArrayList<>();
         for (int i = 0; i < numberOfExecutors; i++) {
             executors.add(new TaskExecutor());
@@ -853,6 +858,8 @@ public class ClusterServiceIT extends ESIntegTestCase {
 
         // wait until all the cluster state updates have been processed
         updateLatch.await();
+        // and until all of the publication callbacks have completed
+        semaphore.acquire(numberOfExecutors);
 
         // assert the number of executed tasks is correct
         assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get());

From f5a992486e62157163873e1c4114ba3a6e1893d8 Mon Sep 17 00:00:00 2001
From: Zachary Tong
Date: Thu, 17 Dec 2015 17:02:07 -0500
Subject: [PATCH 117/322] [Docs] Add missing annotation, fix blockdef

---
 docs/reference/search/profile.asciidoc | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc
index 3d701ae5198..366af6d57c2 100644
--- a/docs/reference/search/profile.asciidoc
+++ b/docs/reference/search/profile.asciidoc
@@ -13,7 +13,7 @@ The output from the Profile API is *very* verbose, especially for complicated qu
Pretty-printing the response is recommended to help understand the output [NOTE] ---------------------- +======================================= The details provided by the Profile API directly expose Lucene class names and concepts, which means that complete interpretation of the results require fairly advanced knowledge of Lucene. This page attempts to give a crash-course in how Lucene executes queries so that you can use the Profile API to successfully @@ -23,7 +23,7 @@ to Lucene's documentation and, in places, the code. With that said, a complete understanding is often not required to fix a slow query. It is usually sufficient to see that a particular component of a query is slow, and not necessarily understand why the `advance` phase of that query is the cause, for example. ---------------------- +======================================= [float] === Usage @@ -185,7 +185,7 @@ The overall structure of this query tree will resemble your original Elasticsear "query_type": "BooleanQuery", "lucene": "message:search message:test", "time": "15.52889800ms", - "breakdown": {...}, + "breakdown": {...}, <1> "children": [ { "query_type": "TermQuery", From e128298c5da69ce8ccc1cd82d2b94cabd52df094 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 17 Dec 2015 17:27:28 -0500 Subject: [PATCH 118/322] [Docs] Formatting tweaks --- docs/reference/search/profile.asciidoc | 35 +++++++++++++++----------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 366af6d57c2..b62d83eee6b 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -247,14 +247,14 @@ The meaning of the stats are as follows: === All parameters: [horizontal] - `create_weight`:: A Query in Lucene must be capable of reuse across multiple IndexSearchers (think of it as the engine that executes a search against a specific Lucene Index). This puts Lucene in a tricky spot, since many queries need to accumulate temporary state/statistics associated with the index it is being used against, but the Query contract mandates that it must be immutable. - + {empty} + + {empty} + To get around this, Lucene asks each query to generate a Weight object which acts as a temporary context object to hold state associated with this particular (IndexSearcher, Query) tuple. The `weight` metric shows how long this process takes @@ -265,7 +265,8 @@ The meaning of the stats are as follows: iterates over matching documents generates a score per-document (e.g. how well does "foo" match the document?). Note, this records the time required to generate the Scorer object, not actuall score the documents. Some queries have faster or slower initialization of the Scorer, depending on optimizations, complexity, etc. - + {empty} + + {empty} + This may also showing timing associated with caching, if enabled and/or applicable for the query `next_doc`:: @@ -280,7 +281,8 @@ The meaning of the stats are as follows: `advance` is the "lower level" version of next_doc: it serves the same purpose of finding the next matching doc, but requires the calling query to perform extra tasks such as identifying and moving past skips, etc. However, not all queries can use next_doc, so `advance` is also timed for those queries. - + {empty} + + {empty} + Conjunctions (e.g. 
     Conjunctions (e.g. `must` clauses in a boolean) are typical consumers of `advance`
 
 `matches`::
@@ -288,18 +290,21 @@ The meaning of the stats is as follows:
     Some queries, such as phrase queries, match documents using a "Two Phase" process. First, the document is
     "approximately" matched, and if it matches approximately, it is checked a second time with a more rigorous
     (and expensive) process. The second phase verification is what the `matches` statistic measures.
-
+    {empty} +
+    {empty} +
     For example, a phrase query first checks a document approximately by ensuring all terms in the phrase are
     present in the doc. If all the terms are present, it then executes the second phase verification to ensure
     the terms are in-order to form the phrase, which is relatively more expensive than just checking for presence
     of the terms.
-
+    {empty} +
+    {empty} +
     Because this two-phase process is only used by a handful of queries, the `matches` statistic will often be zero
 
 `score`::
 
     This records the time taken to score a particular document via its Scorer
 
+
 === `collectors` Section
 
 The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector"
@@ -378,15 +383,15 @@ For reference, the various collector reasons are:
 
 === `rewrite` Section
 
-    All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or
-    more times, and the process continues until the query stops changing. This process allows Lucene to perform
-    optimizations, such as removing redundant clauses, replacing one query for a more efficient execution path,
-    etc. For example a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans
-    are unnecessary in this case.
+All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or
+more times, and the process continues until the query stops changing. This process allows Lucene to perform
+optimizations, such as removing redundant clauses, replacing one query for a more efficient execution path,
+etc. For example a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans
+are unnecessary in this case.
 
-    The rewriting process is complex and difficult to display, since queries can change drastically. Rather than
-    showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This
-    value is cumulative and contains the total time for all queries being rewritten.
+The rewriting process is complex and difficult to display, since queries can change drastically. Rather than
+showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This
+value is cumulative and contains the total time for all queries being rewritten.
 
 === A more complex example
 
@@ -553,7 +558,7 @@ represented:
 
 1. The first `TermQuery` (message:search) represents the main `term` query
 2. The second `TermQuery` (my_field:foo) represents the `post_filter` query
-3. There is a `MatchAllDocsQuery` (*:*) query which is being executed as a second, distinct search. This was
+3. There is a `MatchAllDocsQuery` (\*:*) query which is being executed as a second, distinct search.
This was not part of the query specified by the user, but is auto-generated by the global aggregation to provide a global query scope The Collector tree is fairly straightforward, showing how a single MultiCollector wraps a FilteredCollector From 6a99796b02383862825687a4fb57080ddd92f616 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Dec 2015 16:57:39 -0800 Subject: [PATCH 119/322] Build: Move test framework under a "test" top level dir This allows adding more test projects, eg integ test fixtures that will be coming soon. --- build.gradle | 6 ++--- .../elasticsearch/gradle/BuildPlugin.groovy | 3 ++- .../gradle/plugin/PluginBuildPlugin.groovy | 2 +- .../test/StandaloneTestBasePlugin.groovy | 2 +- core/build.gradle | 2 +- settings.gradle | 2 +- test/build.gradle | 23 +++++++++++++++++++ .../framework}/build.gradle | 1 - .../bootstrap/BootstrapForTesting.java | 0 .../cache/recycler/MockPageCacheRecycler.java | 0 .../MockInternalClusterInfoService.java | 0 .../cluster/routing/TestShardRouting.java | 0 .../common/cli/CliToolTestCase.java | 0 .../common/io/FileTestUtils.java | 0 .../common/io/PathUtilsForTesting.java | 0 .../common/util/MockBigArrays.java | 0 .../elasticsearch/index/MapperTestUtils.java | 0 .../index/MockEngineFactoryPlugin.java | 0 .../java/org/elasticsearch/node/MockNode.java | 0 .../elasticsearch/node/NodeMocksPlugin.java | 0 .../percolator/PercolatorTestUtil.java | 0 .../elasticsearch/plugins/PluginTestUtil.java | 0 .../script/MockScriptEngine.java | 0 .../search/MockSearchService.java | 0 .../bucket/AbstractTermsTestCase.java | 0 ...NativeSignificanceScoreScriptNoParams.java | 0 ...tiveSignificanceScoreScriptWithParams.java | 0 .../bucket/script/TestScript.java | 0 .../metrics/AbstractNumericTestCase.java | 0 .../elasticsearch/test/BackgroundIndexer.java | 0 .../test/CompositeTestCluster.java | 0 .../elasticsearch/test/CorruptionUtils.java | 0 .../elasticsearch/test/DummyShardLock.java | 0 .../test/ESAllocationTestCase.java | 0 .../test/ESBackcompatTestCase.java | 0 .../elasticsearch/test/ESIntegTestCase.java | 0 .../test/ESSingleNodeTestCase.java | 0 .../org/elasticsearch/test/ESTestCase.java | 0 .../test/ESTokenStreamTestCase.java | 0 .../org/elasticsearch/test/ExternalNode.java | 0 .../test/ExternalTestCluster.java | 0 .../test/FieldMaskingReader.java | 0 .../test/IndexSettingsModule.java | 0 .../test/InternalTestCluster.java | 0 .../test/MockIndexEventListener.java | 0 .../test/NodeConfigurationSource.java | 0 .../org/elasticsearch/test/StreamsUtils.java | 0 .../org/elasticsearch/test/TestCluster.java | 0 .../elasticsearch/test/TestSearchContext.java | 0 .../org/elasticsearch/test/VersionUtils.java | 0 .../elasticsearch/test/XContentTestUtils.java | 0 .../test/client/RandomizingClient.java | 0 .../test/cluster/NoopClusterService.java | 0 .../test/cluster/TestClusterService.java | 0 .../ClusterDiscoveryConfiguration.java | 0 .../BlockClusterStateProcessing.java | 0 .../IntermittentLongGCDisruption.java | 0 .../test/disruption/LongGCDisruption.java | 0 .../disruption/NetworkDelaysPartition.java | 0 .../NetworkDisconnectPartition.java | 0 .../test/disruption/NetworkPartition.java | 0 .../test/disruption/NetworkPartitionIT.java | 0 .../NetworkUnresponsivePartition.java | 0 .../test/disruption/NoOpDisruptionScheme.java | 0 .../disruption/ServiceDisruptionScheme.java | 0 .../test/disruption/SingleNodeDisruption.java | 0 .../SlowClusterStateProcessing.java | 0 .../test/engine/AssertingSearcher.java | 0 .../test/engine/MockEngineFactory.java | 0 
.../test/engine/MockEngineSupport.java | 0 .../test/engine/MockInternalEngine.java | 0 .../test/engine/MockShadowEngine.java | 0 .../engine/ThrowingLeafReaderWrapper.java | 0 .../test/gateway/NoopGatewayAllocator.java | 0 .../test/hamcrest/CollectionAssertions.java | 0 .../test/hamcrest/CollectionMatchers.java | 0 .../hamcrest/ElasticsearchAssertions.java | 0 .../test/hamcrest/ElasticsearchMatchers.java | 0 .../test/hamcrest/RegexMatcher.java | 0 .../test/junit/annotations/Network.java | 0 .../test/junit/annotations/TestLogging.java | 0 .../test/junit/listeners/LoggingListener.java | 0 .../junit/listeners/ReproduceInfoPrinter.java | 0 .../junit/rule/RepeatOnExceptionRule.java | 0 .../rest/BlacklistedPathPatternMatcher.java | 0 .../test/rest/ESRestTestCase.java | 0 .../test/rest/FakeRestRequest.java | 0 .../test/rest/RestTestCandidate.java | 0 .../test/rest/RestTestExecutionContext.java | 0 .../org/elasticsearch/test/rest/Stash.java | 0 .../test/rest/client/RestClient.java | 0 .../test/rest/client/RestException.java | 0 .../test/rest/client/RestPath.java | 0 .../test/rest/client/RestResponse.java | 0 .../client/http/HttpDeleteWithEntity.java | 0 .../rest/client/http/HttpGetWithEntity.java | 0 .../rest/client/http/HttpRequestBuilder.java | 0 .../test/rest/client/http/HttpResponse.java | 0 .../test/rest/json/JsonPath.java | 0 .../test/rest/parser/DoSectionParser.java | 0 .../rest/parser/GreaterThanEqualToParser.java | 0 .../test/rest/parser/GreaterThanParser.java | 0 .../test/rest/parser/IsFalseParser.java | 0 .../test/rest/parser/IsTrueParser.java | 0 .../test/rest/parser/LengthParser.java | 0 .../rest/parser/LessThanOrEqualToParser.java | 0 .../test/rest/parser/LessThanParser.java | 0 .../test/rest/parser/MatchParser.java | 0 .../rest/parser/RestTestFragmentParser.java | 0 .../rest/parser/RestTestParseException.java | 0 .../rest/parser/RestTestSectionParser.java | 0 .../parser/RestTestSuiteParseContext.java | 0 .../test/rest/parser/RestTestSuiteParser.java | 0 .../test/rest/parser/SetSectionParser.java | 0 .../test/rest/parser/SetupSectionParser.java | 0 .../test/rest/parser/SkipSectionParser.java | 0 .../test/rest/section/ApiCallSection.java | 0 .../test/rest/section/Assertion.java | 0 .../test/rest/section/DoSection.java | 0 .../test/rest/section/ExecutableSection.java | 0 .../rest/section/GreaterThanAssertion.java | 0 .../section/GreaterThanEqualToAssertion.java | 0 .../test/rest/section/IsFalseAssertion.java | 0 .../test/rest/section/IsTrueAssertion.java | 0 .../test/rest/section/LengthAssertion.java | 0 .../test/rest/section/LessThanAssertion.java | 0 .../section/LessThanOrEqualToAssertion.java | 0 .../test/rest/section/MatchAssertion.java | 0 .../test/rest/section/RestTestSuite.java | 0 .../test/rest/section/SetSection.java | 0 .../test/rest/section/SetupSection.java | 0 .../test/rest/section/SkipSection.java | 0 .../test/rest/section/TestSection.java | 0 .../elasticsearch/test/rest/spec/RestApi.java | 0 .../test/rest/spec/RestApiParser.java | 0 .../test/rest/spec/RestSpec.java | 0 .../test/rest/support/Features.java | 0 .../test/rest/support/FileUtils.java | 0 .../test/store/MockFSDirectoryService.java | 0 .../test/store/MockFSIndexStore.java | 0 .../transport/AssertingLocalTransport.java | 0 .../test/transport/CapturingTransport.java | 0 .../test/transport/MockTransportService.java | 0 .../src/main/resources/log4j.properties | 0 .../BlacklistedPathPatternMatcherTests.java | 0 .../rest/test/AbstractParserTestCase.java | 0 .../test/rest/test/AssertionParsersTests.java | 0 
.../test/rest/test/DoSectionParserTests.java | 0 .../test/rest/test/FileUtilsTests.java | 0 .../test/rest/test/JsonPathTests.java | 0 .../rest/test/RestApiParserFailingTests.java | 0 .../test/rest/test/RestApiParserTests.java | 0 .../test/rest/test/RestTestParserTests.java | 0 .../test/rest/test/SetSectionParserTests.java | 0 .../rest/test/SetupSectionParserTests.java | 0 .../rest/test/SkipSectionParserTests.java | 0 .../rest/test/TestSectionParserTests.java | 0 .../test/test/InternalTestClusterTests.java | 0 .../test/test/LoggingListenerTests.java | 0 .../test/test/SuiteScopeClusterIT.java | 0 .../test/test/TestScopeClusterIT.java | 0 .../test/test/VersionUtilsTests.java | 0 .../rest-api-spec/test/suite1/10_basic.yaml | 0 .../test/suite1/20_another_test.yaml | 0 .../rest-api-spec/test/suite2/10_basic.yaml | 0 .../rest-api-spec/test/suite2/15_test2.yaml | 0 166 files changed, 32 insertions(+), 9 deletions(-) create mode 100644 test/build.gradle rename {test-framework => test/framework}/build.gradle (98%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/common/io/FileTestUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/common/util/MockBigArrays.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/index/MapperTestUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/node/MockNode.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/script/MockScriptEngine.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/MockSearchService.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java (100%) 
rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/BackgroundIndexer.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/CompositeTestCluster.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/CorruptionUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/DummyShardLock.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESIntegTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ExternalNode.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/ExternalTestCluster.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/FieldMaskingReader.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/IndexSettingsModule.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/InternalTestCluster.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/MockIndexEventListener.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/StreamsUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/TestCluster.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/TestSearchContext.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/VersionUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/XContentTestUtils.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/client/RandomizingClient.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java (100%) rename {test-framework => 
test/framework}/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/junit/annotations/Network.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java (100%) rename {test-framework => 
test/framework}/src/main/java/org/elasticsearch/test/rest/Stash.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/RestClient.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/RestException.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/RestPath.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java (100%) rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/Assertion.java (100%) rename {test-framework => 
test/framework}/src/main/java/org/elasticsearch/test/rest/section/DoSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/SetSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/section/TestSection.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/support/Features.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java (100%)
 rename {test-framework => test/framework}/src/main/java/org/elasticsearch/test/transport/MockTransportService.java (100%)
 rename {test-framework => test/framework}/src/main/resources/log4j.properties (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java (100%)
 rename {test-framework => test/framework}/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java (100%)
 rename {test-framework => test/framework}/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml (100%)
 rename {test-framework => test/framework}/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml (100%)
 rename {test-framework => test/framework}/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml (100%)
 rename {test-framework => test/framework}/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml (100%)

diff --git a/build.gradle b/build.gradle
index 8a95fa90925..ab40587d5a1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -109,7 +109,7 @@ subprojects {
   ext.projectSubstitutions = [
     "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
     "org.elasticsearch:elasticsearch:${version}": ':core',
-    "org.elasticsearch:test-framework:${version}": ':test-framework',
+    "org.elasticsearch.test:framework:${version}": ':test:framework',
     "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
     "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
     "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
@@ -141,8 +141,8 @@ subprojects {
   // the dependency is added.
   gradle.projectsEvaluated {
     allprojects {
-      if (project.path == ':test-framework') {
-        // :test-framework:test cannot run before and after :core:test
+      if (project.path == ':test:framework') {
+        // :test:framework:test cannot run before and after :core:test
         return
       }
       configurations.all {
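The projectSubstitutions map above pairs published module coordinates with local project paths; elsewhere in the build (not in this hunk) each pair is fed to Gradle's dependency-substitution API, so a dependency declared against org.elasticsearch.test:framework resolves to the local :test:framework project. A minimal sketch of that mechanism, assuming a map shaped like the one above; the actual wiring lives in the build plugins and is not shown here:

    // Sketch only (Gradle 2.x Groovy DSL): applying a coordinates -> project-path
    // map such as projectSubstitutions via dependency substitution. This
    // illustrates the mechanism, not the repository's actual build code.
    configurations.all {
      resolutionStrategy.dependencySubstitution {
        projectSubstitutions.each { String coords, String path ->
          // e.g. "org.elasticsearch.test:framework:${version}" -> ':test:framework'
          substitute module(coords) with project(path)
        }
      }
    }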
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index c4d0ced6b5c..10f479ee100 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -202,7 +202,7 @@ class BuildPlugin implements Plugin {

     // force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself
     Closure disableTransitiveDeps = { ModuleDependency dep ->
-      if (!(dep instanceof ProjectDependency) && dep.getGroup() != 'org.elasticsearch') {
+      if (!(dep instanceof ProjectDependency) && dep.group.startsWith('org.elasticsearch') == false) {
         dep.transitive = false

         // also create a configuration just for this dependency version, so that later
@@ -302,6 +302,7 @@ class BuildPlugin implements Plugin {
         options.compilerArgs << '-profile' << project.compactProfile
       }
       options.encoding = 'UTF-8'
+      //options.incremental = true
     }
   }
 }
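The widened check in disableTransitiveDeps matters for this rename: the old code compared the group for exact equality with 'org.elasticsearch', which would have treated the newly introduced org.elasticsearch.test group as external and switched off its transitive dependencies; the prefix match keeps every org.elasticsearch* group transitive. A standalone Groovy sketch of just the predicate change (the group strings are examples, and the ProjectDependency clause is omitted):

    // Old vs. new "disable transitivity?" predicate from the hunk above.
    def oldCheck = { String group -> group != 'org.elasticsearch' }
    def newCheck = { String group -> group.startsWith('org.elasticsearch') == false }

    assert oldCheck('org.elasticsearch.test')   // old: framework deps would turn non-transitive
    assert !newCheck('org.elasticsearch.test')  // new: recognized as an Elasticsearch group
    assert oldCheck('com.spatial4j') == newCheck('com.spatial4j') // non-ES groups are unaffected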
testCompile("org.elasticsearch:test-framework:${version}") { + testCompile("org.elasticsearch.test:framework:${version}") { // tests use the locally compiled version of core exclude group: 'org.elasticsearch', module: 'elasticsearch' } diff --git a/settings.gradle b/settings.gradle index e9fb0a043aa..3526c0429ef 100644 --- a/settings.gradle +++ b/settings.gradle @@ -8,7 +8,7 @@ List projects = [ 'distribution:tar', 'distribution:deb', 'distribution:rpm', - 'test-framework', + 'test:framework', 'modules:lang-expression', 'modules:lang-groovy', 'modules:lang-mustache', diff --git a/test/build.gradle b/test/build.gradle new file mode 100644 index 00000000000..037bb8d508e --- /dev/null +++ b/test/build.gradle @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +subprojects { + group = 'org.elasticsearch.test' + apply plugin: 'com.bmuschko.nexus' +} diff --git a/test-framework/build.gradle b/test/framework/build.gradle similarity index 98% rename from test-framework/build.gradle rename to test/framework/build.gradle index 6930abb3d23..a2c568f1d7f 100644 --- a/test-framework/build.gradle +++ b/test/framework/build.gradle @@ -19,7 +19,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks apply plugin: 'elasticsearch.build' -apply plugin: 'com.bmuschko.nexus' dependencies { compile "org.elasticsearch:elasticsearch:${version}" diff --git a/test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java rename to test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java diff --git a/test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java rename to test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java rename to test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java similarity index 100% 
diff --git a/test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
rename to test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
diff --git a/test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
rename to test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
rename to test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
rename to test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
diff --git a/test-framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
rename to test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
diff --git a/test-framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java b/test/framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java
rename to test/framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java
diff --git a/test-framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java b/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java
rename to test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java
diff --git a/test-framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
rename to test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
diff --git a/test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
rename to test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
diff --git a/test-framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java
rename to test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java
diff --git a/test-framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/node/MockNode.java
rename to test/framework/src/main/java/org/elasticsearch/node/MockNode.java
diff --git a/test-framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java b/test/framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java
rename to test/framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java
diff --git a/test-framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java b/test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java
rename to test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java
diff --git
a/test-framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java rename to test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java diff --git a/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java rename to test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java rename to test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java rename to 
test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java rename to test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/DummyShardLock.java b/test/framework/src/main/java/org/elasticsearch/test/DummyShardLock.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/DummyShardLock.java rename to test/framework/src/main/java/org/elasticsearch/test/DummyShardLock.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ESTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java similarity index 100% rename from 
test-framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java rename to test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java b/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java rename to test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java rename to test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java rename to test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java b/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java rename to test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/StreamsUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/TestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/TestCluster.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java 
b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java rename to test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/VersionUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java rename to test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java rename to test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java rename to test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java rename to test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java diff --git 
a/test-framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java similarity index 
100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java rename to test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java rename to test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java b/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java rename to test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java 
b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java rename to test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java rename to test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java rename to test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java rename to 
test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java b/test/framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java rename to test/framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/Stash.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java rename to 
test/framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java similarity index 100% rename from 
test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java rename to 
test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java rename to test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java rename to test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java rename to test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java rename to test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java rename to test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java diff --git a/test-framework/src/main/resources/log4j.properties b/test/framework/src/main/resources/log4j.properties similarity index 100% rename from test-framework/src/main/resources/log4j.properties rename to test/framework/src/main/resources/log4j.properties diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java similarity index 100% rename from 
test-framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java diff --git 
a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java rename to test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java rename to test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java rename to test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java rename to test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java similarity index 100% rename from test-framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java rename to 
test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml similarity index 100% rename from test-framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml rename to test/framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml similarity index 100% rename from test-framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml rename to test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml similarity index 100% rename from test-framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml rename to test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml similarity index 100% rename from test-framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml rename to test/framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml From 801de8f2ba296db62afae25ff201de92cb6ddfd7 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 18 Dec 2015 06:48:52 +0100 Subject: [PATCH 120/322] Fix usage of latch in RetryTests --- .../org/elasticsearch/action/bulk/RetryTests.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 5b9ab898b0e..4d73f932d2f 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -31,6 +31,7 @@ import org.junit.After; import org.junit.Before; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.Matchers.*; @@ -91,7 +92,6 @@ public class RetryTests extends ESTestCase { assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); } - @AwaitsFix(bugUrl = "spuriously fails on Jenkins. Investigation ongoing.") public void testAsyncRetryBacksOff() throws Exception { BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); AssertingListener listener = new AssertingListener(); @@ -108,7 +108,6 @@ public class RetryTests extends ESTestCase { listener.assertOnFailureNeverCalled(); } - @AwaitsFix(bugUrl = "spuriously fails on Jenkins. 
Investigation ongoing.") public void testAsyncRetryFailsAfterBacksOff() throws Exception { BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); AssertingListener listener = new AssertingListener(); @@ -128,7 +127,7 @@ public class RetryTests extends ESTestCase { private static class AssertingListener implements ActionListener { private final CountDownLatch latch; - private volatile int countOnResponseCalled = 0; + private final AtomicInteger countOnResponseCalled = new AtomicInteger(); private volatile Throwable lastFailure; private volatile BulkResponse response; @@ -142,19 +141,19 @@ public class RetryTests extends ESTestCase { @Override public void onResponse(BulkResponse bulkItemResponses) { - latch.countDown(); this.response = bulkItemResponses; - countOnResponseCalled++; + countOnResponseCalled.incrementAndGet(); + latch.countDown(); } @Override public void onFailure(Throwable e) { - latch.countDown(); this.lastFailure = e; + latch.countDown(); } public void assertOnResponseCalled() { - assertThat(countOnResponseCalled, equalTo(1)); + assertThat(countOnResponseCalled.get(), equalTo(1)); } public void assertResponseWithNumberOfItems(int numItems) { From fbff877ec327df119179b016ac13f95fd9125e5a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Dec 2015 22:31:40 -0800 Subject: [PATCH 121/322] Fix thirdpartyaudit to ignore all elasticsearch packages --- .../elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 83fe9115083..814f9c1e60b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -98,7 +98,7 @@ public class ThirdPartyAuditTask extends DefaultTask { // we only want third party dependencies. FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> - dependency.group != "org.elasticsearch" + dependency.group.startsWith("org.elasticsearch") == false }) // we don't want provided dependencies, which we have already scanned. e.g. 
don't From 2093ea50d5e4b0b9125391f21ede374fd3b964db Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Dec 2015 23:15:18 -0800 Subject: [PATCH 122/322] Docs: Fix nodeSettings example for integ tests to use correct Settings.Builder reference --- docs/reference/testing/testing-framework.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/testing/testing-framework.asciidoc b/docs/reference/testing/testing-framework.asciidoc index 9c0e5f4f10d..e0b27733441 100644 --- a/docs/reference/testing/testing-framework.asciidoc +++ b/docs/reference/testing/testing-framework.asciidoc @@ -116,7 +116,7 @@ public class Mytests extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put("node.mode", "network") .build(); } From 711f949ca2b6c2b6879a1b3ff6cd03d9958b4ae6 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Dec 2015 09:15:01 +0100 Subject: [PATCH 123/322] Convert `transport.tcp.compress` setting --- .../org/elasticsearch/common/settings/ClusterSettings.java | 3 ++- .../src/main/java/org/elasticsearch/transport/Transport.java | 5 +---- .../org/elasticsearch/transport/netty/NettyTransport.java | 2 +- .../java/org/elasticsearch/test/InternalTestCluster.java | 2 +- 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index ac976268a0c..ac9631d29b1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -136,5 +136,6 @@ public final class ClusterSettings extends AbstractScopedSettings { InternalClusterService.CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, - Transport.TRANSPORT_PROFILES_SETTING))); + Transport.TRANSPORT_PROFILES_SETTING, + Transport.TRANSPORT_TCP_COMPRESS))); } diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index 16270234494..78b07e3aae3 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -37,10 +37,7 @@ public interface Transport extends LifecycleComponent { Setting TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", true, Setting.Scope.CLUSTER); - - public static class TransportSettings { - public static final String TRANSPORT_TCP_COMPRESS = "transport.tcp.compress"; - } + Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, Setting.Scope.CLUSTER); void transportServiceAdapter(TransportServiceAdapter service); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 37f1dc4fa0b..6a6a6c38011 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -225,7 +225,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", 
settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT))); this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null); this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1); - this.compress = settings.getAsBoolean(TransportSettings.TRANSPORT_TCP_COMPRESS, false); + this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); this.connectionsPerNodeRecovery = this.settings.getAsInt("transport.netty.connections_per_node.recovery", settings.getAsInt(CONNECTIONS_PER_NODE_RECOVERY, 2)); this.connectionsPerNodeBulk = this.settings.getAsInt("transport.netty.connections_per_node.bulk", settings.getAsInt(CONNECTIONS_PER_NODE_BULK, 3)); diff --git a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index d1749399146..10d4482a24c 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -372,7 +372,7 @@ public final class InternalTestCluster extends TestCluster { Builder builder = Settings.settingsBuilder() .put(SETTING_CLUSTER_NODE_SEED, seed); if (isLocalTransportConfigured() == false) { - builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, rarely(random)); + builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); } if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); From 5b991b9d5e90258422964023d49ef2315dc201bd Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Dec 2015 09:43:56 +0100 Subject: [PATCH 124/322] Check for tragic event on all kinds of exceptions not only ACE and IOException It's important to close not matter what exception caused a tragic event. Today we only check on IOException and AlreadyClosedExceptions. The test had a bug and threw an IAE instead causing the translog not to be closed. 
--- .../index/translog/Translog.java | 5 +- .../index/translog/TranslogTests.java | 59 ++++++++++++++++--- 2 files changed, 53 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index a105f652728..fd5c64f96ac 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -424,6 +424,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC closeOnTragicEvent(ex); throw ex; } catch (Throwable e) { + closeOnTragicEvent(e); throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e); } finally { Releasables.close(out.bytes()); @@ -500,7 +501,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (closed.get() == false) { current.sync(); } - } catch (AlreadyClosedException | IOException ex) { + } catch (Throwable ex) { closeOnTragicEvent(ex); throw ex; } @@ -533,7 +534,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } - } catch (AlreadyClosedException | IOException ex) { + } catch (Throwable ex) { closeOnTragicEvent(ex); throw ex; } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 51de00f74a6..8b3294c15b8 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -63,7 +63,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Predicate; import static org.hamcrest.Matchers.*; @@ -1387,6 +1386,35 @@ public class TranslogTests extends ESTestCase { } } + public void testTragicEventCanBeAnyException() throws IOException { + Path tempDir = createTempDir(); + final AtomicBoolean fail = new AtomicBoolean(); + TranslogConfig config = getTranslogConfig(tempDir); + assumeFalse("this won't work if we sync on any op", config.isSyncOnEachOperation()); + Translog translog = getFailableTranslog(fail, config, false, true); + LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer borders regularly + translog.add(new Translog.Index("test", "1", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); + fail.set(true); + try { + Translog.Location location = translog.add(new Translog.Index("test", "2", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); + if (config.getType() == TranslogWriter.Type.BUFFERED) { // the buffered case will fail on the add if we exceed the buffer or will fail on the flush once we sync + if (randomBoolean()) { + translog.ensureSynced(location); + } else { + translog.sync(); + } + } + //TODO once we have a mock FS that can simulate this, we can also fail on plain sync + fail("expected an UnknownException to be thrown"); + } catch (UnknownException ex) { + // expected + } catch (TranslogException ex) { + assertTrue(ex.getCause() instanceof UnknownException); + } + assertFalse(translog.isOpen()); + assertTrue(translog.getTragicException() instanceof UnknownException); + } + public void testFatalIOExceptionsWhileWritingConcurrently() throws
IOException, InterruptedException { Path tempDir = createTempDir(); final AtomicBoolean fail = new AtomicBoolean(false); @@ -1432,9 +1460,9 @@ public class TranslogTests extends ESTestCase { } boolean atLeastOneFailed = false; for (Throwable ex : threadExceptions) { + assertTrue(ex.toString(), ex instanceof IOException || ex instanceof AlreadyClosedException); if (ex != null) { atLeastOneFailed = true; - break; } } if (atLeastOneFailed == false) { @@ -1477,8 +1505,11 @@ public class TranslogTests extends ESTestCase { } } } - private Translog getFailableTranslog(final AtomicBoolean fail, final TranslogConfig config) throws IOException { + return getFailableTranslog(fail, config, randomBoolean(), false); + } + + private Translog getFailableTranslog(final AtomicBoolean fail, final TranslogConfig config, final boolean partialWrites, final boolean throwUnknownException) throws IOException { return new Translog(config) { @Override TranslogWriter.ChannelFactory getChannelFactory() { @@ -1488,7 +1519,7 @@ public class TranslogTests extends ESTestCase { @Override public FileChannel open(Path file) throws IOException { FileChannel channel = factory.open(file); - return new ThrowingFileChannel(fail, randomBoolean(), channel); + return new ThrowingFileChannel(fail, partialWrites, throwUnknownException, channel); } }; } @@ -1498,11 +1529,13 @@ public class TranslogTests extends ESTestCase { public static class ThrowingFileChannel extends FilterFileChannel { private final AtomicBoolean fail; private final boolean partialWrite; + private final boolean throwUnknownException; - public ThrowingFileChannel(AtomicBoolean fail, boolean partialWrite, FileChannel delegate) { + public ThrowingFileChannel(AtomicBoolean fail, boolean partialWrite, boolean throwUnknownException, FileChannel delegate) { super(delegate); this.fail = fail; this.partialWrite = partialWrite; + this.throwUnknownException = throwUnknownException; } @Override @@ -1519,19 +1552,27 @@ public class TranslogTests extends ESTestCase { public int write(ByteBuffer src) throws IOException { if (fail.get()) { if (partialWrite) { - if (src.limit() > 1) { + if (src.hasRemaining()) { final int pos = src.position(); final int limit = src.limit(); - src.limit(limit / 2); + src.limit(randomIntBetween(pos, limit)); super.write(src); - src.position(pos); src.limit(limit); + src.position(pos); throw new IOException("__FAKE__ no space left on device"); } } - throw new MockDirectoryWrapper.FakeIOException(); + if (throwUnknownException) { + throw new UnknownException(); + } else { + throw new MockDirectoryWrapper.FakeIOException(); + } } return super.write(src); } } + + private static final class UnknownException extends RuntimeException { + + } } From 186242145f98be507ab2041950a8d20458ef245b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Dec 2015 10:26:57 +0100 Subject: [PATCH 125/322] [TEST] use new settings infra --- .../test/java/org/elasticsearch/gateway/QuorumGatewayIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index 69c518eb9c6..a817b23949f 100644 --- a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -51,7 +51,7 @@ public class QuorumGatewayIT extends ESIntegTestCase { logger.info("--> starting 3 nodes"); // we are shutting down nodes - make sure we don't have 2 clusters if we test network
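// note: ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() below
// resolves to the same "discovery.zen.minimum_master_nodes" key as the old string
// constant; the typed Setting is what the new settings infrastructure validates against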
internalCluster().startNodesAsync(3, - Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2).build()).get(); + Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2).build()).get(); createIndex("test"); From 887789d1a949ab4912251fd320b35ba4164f044c Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Dec 2015 17:32:21 +0100 Subject: [PATCH 126/322] Gradle idea plugin does not properly mark resources directories --- build.gradle | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/build.gradle b/build.gradle index ab40587d5a1..3a94259db97 100644 --- a/build.gradle +++ b/build.gradle @@ -179,6 +179,30 @@ gradle.projectsEvaluated { // intellij configuration allprojects { apply plugin: 'idea' + + idea { + module { + // same as for the IntelliJ Gradle tooling integration + inheritOutputDirs = false + outputDir = file('build/classes/main') + testOutputDir = file('build/classes/test') + + iml { + // fix so that Gradle idea plugin properly generates support for resource folders + // see also https://issues.gradle.org/browse/GRADLE-2975 + withXml { + it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/main/resources' }.each { + it.attributes().remove('isTestSource') + it.attributes().put('type', 'java-resource') + } + it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/test/resources' }.each { + it.attributes().remove('isTestSource') + it.attributes().put('type', 'java-test-resource') + } + } + } + } + } } idea { From 15588a499104c73b95df831c74cab79db3f3257b Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Thu, 17 Dec 2015 17:45:10 +0000 Subject: [PATCH 127/322] Aggregations: Run pipeline aggregations for empty buckets added in the Range Aggregation Closes #15471 --- .../bucket/histogram/InternalHistogram.java | 22 +++-- .../messy/tests/BucketSelectorTests.java | 86 ++++++++++++++++++- .../messy/tests/ScriptedMetricTests.java | 7 +- 3 files changed, 103 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index a87987452b8..faca359d766 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -391,12 +391,14 @@ public class InternalHistogram extends Inter return reducedBuckets; } - private void addEmptyBuckets(List list) { + private void addEmptyBuckets(List list, ReduceContext reduceContext) { B lastBucket = null; ExtendedBounds bounds = emptyBucketInfo.bounds; ListIterator iter = list.listIterator(); // first adding all the empty buckets *before* the actual data (based on the extended_bounds.min the user requested) + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(Collections.singletonList(emptyBucketInfo.subAggregations), + reduceContext); if (bounds != null) { B firstBucket = iter.hasNext() ?
list.get(iter.nextIndex()) : null; if (firstBucket == null) { @@ -404,7 +406,9 @@ public class InternalHistogram extends Inter long key = bounds.min; long max = bounds.max; while (key <= max) { - iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter)); + iter.add(getFactory().createBucket(key, 0, + reducedEmptySubAggs, + keyed, formatter)); key = emptyBucketInfo.rounding.nextRoundingValue(key); } } @@ -413,7 +417,9 @@ public class InternalHistogram extends Inter long key = bounds.min; if (key < firstBucket.key) { while (key < firstBucket.key) { - iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter)); + iter.add(getFactory().createBucket(key, 0, + reducedEmptySubAggs, + keyed, formatter)); key = emptyBucketInfo.rounding.nextRoundingValue(key); } } @@ -428,7 +434,9 @@ public class InternalHistogram extends Inter if (lastBucket != null) { long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key); while (key < nextBucket.key) { - iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter)); + iter.add(getFactory().createBucket(key, 0, + reducedEmptySubAggs, keyed, + formatter)); key = emptyBucketInfo.rounding.nextRoundingValue(key); } assert key == nextBucket.key; @@ -441,7 +449,9 @@ public class InternalHistogram extends Inter long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key); long max = bounds.max; while (key <= max) { - iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter)); + iter.add(getFactory().createBucket(key, 0, + reducedEmptySubAggs, keyed, + formatter)); key = emptyBucketInfo.rounding.nextRoundingValue(key); } } @@ -453,7 +463,7 @@ public class InternalHistogram extends Inter // adding empty buckets if needed if (minDocCount == 0) { - addEmptyBuckets(reducedBuckets); + addEmptyBuckets(reducedBuckets, reduceContext); } if (order == InternalOrder.KEY_ASC) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java index 2883b74cc1d..a1faea0b5e5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java @@ -45,12 +45,14 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.having; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.SuiteScopeTestCase public class BucketSelectorTests extends ESIntegTestCase { @@ -74,6 +76,7 @@ public class BucketSelectorTests extends ESIntegTestCase { public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); + createIndex("idx_with_gaps"); interval = randomIntBetween(1, 50); 
numDocs = randomIntBetween(10, 500); @@ -84,6 +87,10 @@ public class BucketSelectorTests extends ESIntegTestCase { for (int docs = 0; docs < numDocs; docs++) { builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); } + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 2, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 3, 0, 0))); client().preparePutIndexedScript().setId("my_script").setScriptLang(GroovyScriptEngineService.NAME) .setSource("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }").get(); @@ -93,12 +100,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } private XContentBuilder newDocBuilder() throws IOException { + return newDocBuilder(randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber), + randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber)); + } + + private XContentBuilder newDocBuilder(int field1Value, int field2Value, int field3Value, int field4Value) throws IOException { XContentBuilder jsonBuilder = jsonBuilder(); jsonBuilder.startObject(); - jsonBuilder.field(FIELD_1_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_2_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_3_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_4_NAME, randomIntBetween(minNumber, maxNumber)); + jsonBuilder.field(FIELD_1_NAME, field1Value); + jsonBuilder.field(FIELD_2_NAME, field2Value); + jsonBuilder.field(FIELD_3_NAME, field3Value); + jsonBuilder.field(FIELD_4_NAME, field4Value); jsonBuilder.endObject(); return jsonBuilder; } @@ -451,4 +463,70 @@ public class BucketSelectorTests extends ESIntegTestCase { assertThat(field2SumValue + field3SumValue, greaterThan(100.0)); } } + + public void testEmptyBuckets() { + SearchResponse response = client().prepareSearch("idx_with_gaps") + .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(1) + .subAggregation(histogram("inner_histo").field(FIELD_1_NAME).interval(1).extendedBounds(1l, 4l).minDocCount(0) + .subAggregation(derivative("derivative").setBucketsPaths("_count").gapPolicy(GapPolicy.INSERT_ZEROS)))) + .execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("1")); + Histogram innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + List innerBuckets = innerHisto.getBuckets(); + assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), 
equalTo("2")); + innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + innerBuckets = innerHisto.getBuckets(); + assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("3")); + innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + innerBuckets = innerHisto.getBuckets(); + assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java index c54510acd4e..98d53c85174 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java @@ -58,7 +58,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @ClusterScope(scope = Scope.SUITE) @@ -739,6 +738,10 @@ public class ScriptedMetricTests extends ESIntegTestCase { ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted"); assertThat(scriptedMetric, notNullValue()); assertThat(scriptedMetric.getName(), equalTo("scripted")); - assertThat(scriptedMetric.aggregation(), nullValue()); + assertThat(scriptedMetric.aggregation(), notNullValue()); + assertThat(scriptedMetric.aggregation(), instanceOf(List.class)); + List aggregationResult = (List) scriptedMetric.aggregation(); + assertThat(aggregationResult.size(), equalTo(1)); + assertThat(aggregationResult.get(0), equalTo(0)); } } From 9d9b557cea945da771688d22992104060f9988a3 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Fri, 11 Dec 2015 18:33:59 -0500 Subject: [PATCH 128/322] Don't allow nodes with missing custom data types to join cluster --- .../elasticsearch/cluster/ClusterState.java | 2 +- .../cluster/metadata/MetaData.java | 2 +- .../discovery/zen/ZenDiscovery.java | 15 ++- .../zen/membership/MembershipAction.java | 30 ++++-- .../discovery/zen/ZenDiscoveryIT.java | 68 ++++++++++-- .../DedicatedClusterSnapshotRestoreIT.java | 82 +------------- .../test/TestCustomMetaData.java | 102 ++++++++++++++++++ 7 files changed, 196 insertions(+), 105 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index e20f21b4cec..34ccfd3b433 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -129,7 +129,7 @@ public class ClusterState implements ToXContent, Diffable { @SuppressWarnings("unchecked") T proto = (T)customPrototypes.get(type); if (proto == null) { - throw new IllegalArgumentException("No custom state prototype registered for type [" + type + "]"); + throw new IllegalArgumentException("No custom state prototype registered for type [" + type + "], node likely missing plugins"); } return proto; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index c84f5b3690c..55cb8a5d944 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -134,7 +134,7 @@ public class MetaData implements Iterable, Diffable, Fr //noinspection unchecked T proto = (T) customPrototypes.get(type); if (proto == null) { - throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]"); + throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "], node likely missing plugins"); } return proto; } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index d69227d8f0b..8849a849f97 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -836,8 +836,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } } - void handleJoinRequest(final DiscoveryNode node, final MembershipAction.JoinCallback callback) { - + void handleJoinRequest(final DiscoveryNode node, final ClusterState state, final MembershipAction.JoinCallback callback) { if (!transportService.addressSupported(node.address().getClass())) { // TODO, what should we do now? Maybe inform that node that its crap? logger.warn("received a wrong address type from [{}], ignoring...", node); @@ -849,7 +848,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // Sanity check: maybe we don't end up here, because serialization may have failed. 
if (node.getVersion().before(minimumNodeJoinVersion)) { callback.onFailure( - new IllegalStateException("Can't handle join request from a node with a version [" + node.getVersion() + "] that is lower than the minimum compatible version [" + minimumNodeJoinVersion.minimumCompatibilityVersion() + "]") + new IllegalStateException("Can't handle join request from a node with a version [" + node.getVersion() + "] that is lower than the minimum compatible version [" + minimumNodeJoinVersion.minimumCompatibilityVersion() + "]") ); return; } @@ -859,7 +858,13 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // validate the join request, will throw a failure if it fails, which will get back to the // node calling the join request - membership.sendValidateJoinRequestBlocking(node, joinTimeout); + try { + membership.sendValidateJoinRequestBlocking(node, state, joinTimeout); + } catch (Throwable e) { + logger.warn("failed to validate incoming join request from node [{}]", node); + callback.onFailure(new IllegalStateException("failure when sending a validation request to node", e)); + return; + } nodeJoinController.handleJoinRequest(node, callback); } } @@ -1039,7 +1044,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen private class MembershipListener implements MembershipAction.MembershipListener { @Override public void onJoin(DiscoveryNode node, MembershipAction.JoinCallback callback) { - handleJoinRequest(node, callback); + handleJoinRequest(node, clusterService.state(), callback); } @Override diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java index 4260b992ddb..5a96addc842 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.discovery.zen.membership; import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; @@ -88,10 +89,6 @@ public class MembershipAction extends AbstractComponent { transportService.submitRequest(masterNode, DISCOVERY_LEAVE_ACTION_NAME, new LeaveRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(timeout.millis(), TimeUnit.MILLISECONDS); } - public void sendJoinRequest(DiscoveryNode masterNode, DiscoveryNode node) { - transportService.sendRequest(masterNode, DISCOVERY_JOIN_ACTION_NAME, new JoinRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME); - } - public void sendJoinRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) { transportService.submitRequest(masterNode, DISCOVERY_JOIN_ACTION_NAME, new JoinRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME) .txGet(timeout.millis(), TimeUnit.MILLISECONDS); @@ -100,8 +97,8 @@ public class MembershipAction extends AbstractComponent { /** * Validates the join request, throwing a failure if it failed. 
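* As of this change the master's current cluster state is serialized to the joining
* node as part of the request, so a node that cannot deserialize the state (for
* example because it is missing a plugin that registers a custom prototype) is
* rejected before it joins the cluster.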
*/ - public void sendValidateJoinRequestBlocking(DiscoveryNode node, TimeValue timeout) { - transportService.submitRequest(node, DISCOVERY_JOIN_VALIDATE_ACTION_NAME, new ValidateJoinRequest(), EmptyTransportResponseHandler.INSTANCE_SAME) + public void sendValidateJoinRequestBlocking(DiscoveryNode node, ClusterState state, TimeValue timeout) { + transportService.submitRequest(node, DISCOVERY_JOIN_VALIDATE_ACTION_NAME, new ValidateJoinRequest(state), EmptyTransportResponseHandler.INSTANCE_SAME) .txGet(timeout.millis(), TimeUnit.MILLISECONDS); } @@ -156,9 +153,26 @@ public class MembershipAction extends AbstractComponent { } } - public static class ValidateJoinRequest extends TransportRequest { + class ValidateJoinRequest extends TransportRequest { + private ClusterState state; - public ValidateJoinRequest() { + ValidateJoinRequest() { + } + + ValidateJoinRequest(ClusterState state) { + this.state = state; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.state = ClusterState.Builder.readFrom(in, nodesProvider.nodes().localNode()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + this.state.writeTo(out); } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 3b6708630ee..217e86526cc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -24,10 +24,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Priority; @@ -45,6 +43,7 @@ import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.TestCustomMetaData; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; @@ -57,9 +56,7 @@ import org.hamcrest.Matchers; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; +import java.util.*; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; @@ -228,16 +225,69 @@ public class ZenDiscoveryIT extends ESIntegTestCase { assertThat(ExceptionsHelper.detailedMessage(reference.get()), containsString("cluster state from a different master than the current one, rejecting")); } + public void testHandleNodeJoin_incompatibleClusterState() throws UnknownHostException { + Settings nodeSettings = 
Settings.settingsBuilder() + .put("discovery.type", "zen") // <-- To override the local setting if set externally + .build(); + String masterOnlyNode = internalCluster().startMasterOnlyNode(nodeSettings); + String node1 = internalCluster().startNode(nodeSettings); + ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, masterOnlyNode); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, node1); + final ClusterState state = clusterService.state(); + MetaData.Builder mdBuilder = MetaData.builder(state.metaData()); + mdBuilder.putCustom(CustomMetaData.TYPE, new CustomMetaData("data")); + ClusterState stateWithCustomMetaData = ClusterState.builder(state).metaData(mdBuilder).build(); + + final AtomicReference holder = new AtomicReference<>(); + DiscoveryNode node = state.nodes().localNode(); + zenDiscovery.handleJoinRequest(node, stateWithCustomMetaData, new MembershipAction.JoinCallback() { + @Override + public void onSuccess() { + } + + @Override + public void onFailure(Throwable t) { + holder.set((IllegalStateException) t); + } + }); + + assertThat(holder.get(), notNullValue()); + assertThat(holder.get().getMessage(), equalTo("failure when sending a validation request to node")); + } + + public static class CustomMetaData extends TestCustomMetaData { + public static final String TYPE = "custom_md"; + + CustomMetaData(String data) { + super(data); + } + + @Override + protected TestCustomMetaData newTestCustomMetaData(String data) { + return new CustomMetaData(data); + } + + @Override + public String type() { + return TYPE; + } + + @Override + public EnumSet context() { + return EnumSet.of(MetaData.XContentContext.GATEWAY, MetaData.XContentContext.SNAPSHOT); + } + } + public void testHandleNodeJoin_incompatibleMinVersion() throws UnknownHostException { Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally .build(); String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1); ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName); - + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0); final AtomicReference holder = new AtomicReference<>(); - zenDiscovery.handleJoinRequest(node, new MembershipAction.JoinCallback() { + zenDiscovery.handleJoinRequest(node, clusterService.state(), new MembershipAction.JoinCallback() { @Override public void onSuccess() { } diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 9133828c070..7946116f571 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -33,24 +32,17 @@ import 
org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.metadata.MetaData.Custom; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.index.store.IndexStore; @@ -68,9 +60,9 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.TestCustomMetaData; import org.elasticsearch.test.rest.FakeRestRequest; -import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; @@ -902,78 +894,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest )); } - public static abstract class TestCustomMetaData extends AbstractDiffable implements MetaData.Custom { - private final String data; - - protected TestCustomMetaData(String data) { - this.data = data; - } - - public String getData() { - return data; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TestCustomMetaData that = (TestCustomMetaData) o; - - if (!data.equals(that.data)) return false; - - return true; - } - - @Override - public int hashCode() { - return data.hashCode(); - } - - protected abstract TestCustomMetaData newTestCustomMetaData(String data); - - @Override - public Custom readFrom(StreamInput in) throws IOException { - return newTestCustomMetaData(in.readString()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(getData()); - } - - @Override - public Custom fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - String data = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - String currentFieldName = parser.currentName(); - if ("data".equals(currentFieldName)) { - if (parser.nextToken() != XContentParser.Token.VALUE_STRING) { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, invalid data type"); - } - data = parser.text(); - } else { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [{}]", currentFieldName); 
- } - } else { - throw new ElasticsearchParseException("failed to parse snapshottable metadata"); - } - } - if (data == null) { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, data not found"); - } - return newTestCustomMetaData(data); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.field("data", getData()); - return builder; - } - } - static { MetaData.registerPrototype(SnapshottableMetadata.TYPE, SnapshottableMetadata.PROTO); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java new file mode 100644 index 00000000000..92d5b95cfac --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public abstract class TestCustomMetaData extends AbstractDiffable implements MetaData.Custom { + private final String data; + + protected TestCustomMetaData(String data) { + this.data = data; + } + + public String getData() { + return data; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TestCustomMetaData that = (TestCustomMetaData) o; + + if (!data.equals(that.data)) return false; + + return true; + } + + @Override + public int hashCode() { + return data.hashCode(); + } + + protected abstract TestCustomMetaData newTestCustomMetaData(String data); + + @Override + public MetaData.Custom readFrom(StreamInput in) throws IOException { + return newTestCustomMetaData(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(getData()); + } + + @Override + public MetaData.Custom fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token; + String data = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + String currentFieldName = parser.currentName(); + if ("data".equals(currentFieldName)) { + if (parser.nextToken() != XContentParser.Token.VALUE_STRING) { + throw new ElasticsearchParseException("failed to parse snapshottable 
metadata, invalid data type"); + } + data = parser.text(); + } else { + throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [{}]", currentFieldName); + } + } else { + throw new ElasticsearchParseException("failed to parse snapshottable metadata"); + } + } + if (data == null) { + throw new ElasticsearchParseException("failed to parse snapshottable metadata, data not found"); + } + return newTestCustomMetaData(data); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("data", getData()); + return builder; + } +} From 94d6b221372810b3d188bc9c6d51ab0302d98f4b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 11:43:58 -0500 Subject: [PATCH 129/322] add gradle licenseHeaders to precommit This is a port of the logic from apache lucene that uses Rat --- build.gradle | 5 +- .../precommit/LicenseHeadersTask.groovy | 156 ++++++++++++++++++ .../gradle/precommit/PrecommitTasks.groovy | 1 + .../precommit/ThirdPartyAuditTask.groovy | 2 +- 4 files changed, 161 insertions(+), 3 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy diff --git a/build.gradle b/build.gradle index ab40587d5a1..204986cea04 100644 --- a/build.gradle +++ b/build.gradle @@ -128,10 +128,11 @@ subprojects { // Only if your buildscript and Ant's optional task need the same library would you have to define it twice. // https://docs.gradle.org/current/userguide/organizing_build_logic.html configurations { - forbiddenApis + buildTools } dependencies { - forbiddenApis 'de.thetaphi:forbiddenapis:2.0' + buildTools 'de.thetaphi:forbiddenapis:2.0' + buildTools 'org.apache.rat:apache-rat:0.11' } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy new file mode 100644 index 00000000000..7c57fd23c23 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit + +import java.nio.file.Files + +import org.gradle.api.DefaultTask +import org.gradle.api.tasks.SourceSet +import org.gradle.api.tasks.TaskAction + +import groovy.xml.NamespaceBuilder +import groovy.xml.NamespaceBuilderSupport + +/** + * Checks files for license headers. + *

+ * This is a port of the apache lucene check + */ +public class LicenseHeadersTask extends DefaultTask { + + LicenseHeadersTask() { + description = "Checks sources for missing, incorrect, or unacceptable license headers" + } + + @TaskAction + public void check() { + // load rat tasks + AntBuilder ant = new AntBuilder() + ant.typedef(resource: "org/apache/rat/anttasks/antlib.xml", + uri: "antlib:org.apache.rat.anttasks", + classpath: project.configurations.buildTools.asPath) + NamespaceBuilderSupport rat = NamespaceBuilder.newInstance(ant, "antlib:org.apache.rat.anttasks") + + // create a file for the log to go to under reports/ + File reportDir = new File(project.buildDir, "reports/licenseHeaders") + reportDir.mkdirs() + File reportFile = new File(reportDir, "rat.log") + Files.deleteIfExists(reportFile.toPath()) + + // run rat, going to the file + rat.report(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) { + // checks all the java sources (allJava) + for (SourceSet set : project.sourceSets) { + for (File dir : set.allJava.srcDirs) { + ant.fileset(dir: dir) + } + } + + // BSD 4-clause stuff (is disallowed below) + substringMatcher(licenseFamilyCategory: "BSD4 ", + licenseFamilyName: "Original BSD License (with advertising clause)") { + pattern(substring: "All advertising materials") + } + + // BSD-like stuff + substringMatcher(licenseFamilyCategory: "BSD ", + licenseFamilyName: "Modified BSD License") { + // brics automaton + pattern(substring: "Copyright (c) 2001-2009 Anders Moeller") + // snowball + pattern(substring: "Copyright (c) 2001, Dr Martin Porter") + // UMASS kstem + pattern(substring: "THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS") + // Egothor + pattern(substring: "Egothor Software License version 1.00") + // JaSpell + pattern(substring: "Copyright (c) 2005 Bruno Martins") + // d3.js + pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS") + // highlight.js + pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS") + } + + // MIT-like + substringMatcher(licenseFamilyCategory: "MIT ", + licenseFamilyName: "The MIT License") { + // ICU license + pattern(substring: "Permission is hereby granted, free of charge, to any person obtaining a copy") + } + + // Apache + substringMatcher(licenseFamilyCategory: "AL ", + licenseFamilyName: "Apache") { + // Apache license (ES) + pattern(substring: "Licensed to Elasticsearch under one or more contributor") + // Apache license (ASF) + pattern(substring: "Licensed to the Apache Software Foundation (ASF) under") + // this is the old-school one under some files + pattern(substring: "Licensed under the Apache License, Version 2.0 (the \"License\")") + } + + // Generated resources + substringMatcher(licenseFamilyCategory: "GEN ", + licenseFamilyName: "Generated") { + // svg files generated by gnuplot + pattern(substring: "Produced by GNUPLOT") + // snowball stemmers generated by snowball compiler + pattern(substring: "This file was generated automatically by the Snowball to Java compiler") + // uima tests generated by JCasGen + pattern(substring: "First created by JCasGen") + // parsers generated by antlr + pattern(substring: "ANTLR GENERATED CODE") + } + + // approved categories + approvedLicense(familyName: "Apache") + approvedLicense(familyName: "The MIT License") + approvedLicense(familyName: "Modified BSD License") + approvedLicense(familyName: "Generated") + } + + // check the license file for any errors, this should be fast. 
+ boolean zeroUnknownLicenses = false + boolean foundProblemsWithFiles = false + reportFile.eachLine('UTF-8') { line -> + if (line.startsWith("0 Unknown Licenses")) { + zeroUnknownLicenses = true + } + + if (line.startsWith(" !")) { + foundProblemsWithFiles = true + } + } + + if (zeroUnknownLicenses == false || foundProblemsWithFiles) { + // print the unapproved license section, usually its all you need to fix problems. + int sectionNumber = 0 + reportFile.eachLine('UTF-8') { line -> + if (line.startsWith("*******************************")) { + sectionNumber++ + } else { + if (sectionNumber == 2) { + logger.error(line) + } + } + } + throw new IllegalStateException("License header problems were found! Full details: " + reportFile.absolutePath) + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index ef2a49cc444..f99032e1e2d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -34,6 +34,7 @@ class PrecommitTasks { List precommitTasks = [ configureForbiddenApis(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), + project.tasks.create('licenseHeaders', LicenseHeadersTask.class), project.tasks.create('jarHell', JarHellTask.class), project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)] diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 814f9c1e60b..45ab2300449 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -114,7 +114,7 @@ public class ThirdPartyAuditTask extends DefaultTask { ant.taskdef(name: "thirdPartyAudit", classname: "de.thetaphi.forbiddenapis.ant.AntTask", - classpath: project.configurations.forbiddenApis.asPath) + classpath: project.configurations.buildTools.asPath) // print which jars we are going to scan, always // this is not the time to try to be succinct! Forbidden will print plenty on its own! From e82808917c98c9ea2f3c8f9c337e5ec3558673d1 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 11:58:18 -0500 Subject: [PATCH 130/322] don't fail on missing source dirs --- .../elasticsearch/gradle/precommit/LicenseHeadersTask.groovy | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 7c57fd23c23..49687d933e3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -58,7 +58,10 @@ public class LicenseHeadersTask extends DefaultTask { // checks all the java sources (allJava) for (SourceSet set : project.sourceSets) { for (File dir : set.allJava.srcDirs) { - ant.fileset(dir: dir) + // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main... 
+ if (dir.exists()) { + ant.fileset(dir: dir) + } } } From c4f823903a5afecb20227f5f000696c093e4f163 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 12:00:52 -0500 Subject: [PATCH 131/322] fix indent --- .../elasticsearch/gradle/precommit/LicenseHeadersTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 49687d933e3..e680a546f5b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -155,5 +155,5 @@ public class LicenseHeadersTask extends DefaultTask { } throw new IllegalStateException("License header problems were found! Full details: " + reportFile.absolutePath) } - } + } } From 2e2e328879fd712d4b291dff22dd0c6bb8d2ec70 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 13:02:39 -0500 Subject: [PATCH 132/322] add missing license header --- .../mapper/attachments/TikaImplTests.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java index fc17d59603f..f42110c1e62 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java @@ -1,5 +1,24 @@ package org.elasticsearch.mapper.attachments; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + import org.elasticsearch.test.ESTestCase; public class TikaImplTests extends ESTestCase { From 447729f0e18186a775ce5f8c279f9ce1b292d2c0 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 13:08:17 -0500 Subject: [PATCH 133/322] add missing license headers --- .../plugin/hadoop/hdfs/Utils.java | 21 ++++++++++++++++++- .../hadoop/hdfs/HdfsRepositoryRestIT.java | 19 +++++++++++++++++ .../plugin/hadoop/hdfs/HdfsTestPlugin.java | 19 +++++++++++++++++ .../plugin/hadoop/hdfs/UtilsTests.java | 19 +++++++++++++++++ 4 files changed, 77 insertions(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java index 101025d029e..cf786179787 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java @@ -1,5 +1,24 @@ package org.elasticsearch.plugin.hadoop.hdfs; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import java.net.URL; import java.security.AccessControlContext; import java.security.AccessController; @@ -81,4 +100,4 @@ public abstract class Utils { return base; } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java index fd87e18cbce..8d8d6755cc3 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java @@ -1,5 +1,24 @@ package org.elasticsearch.plugin.hadoop.hdfs; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + import java.io.IOException; import java.util.Collection; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java index 4b4e2aa05ef..8730a46a084 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java @@ -1,5 +1,24 @@ package org.elasticsearch.plugin.hadoop.hdfs; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import java.net.URL; import java.util.Collections; import java.util.List; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java index 2f492eee343..37aecb04b9b 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java @@ -1,5 +1,24 @@ package org.elasticsearch.plugin.hadoop.hdfs; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + import org.elasticsearch.test.ESTestCase; public class UtilsTests extends ESTestCase { From 6ba374d6326f59b801c01698c3ec9f5a64d20ac0 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 13:15:34 -0500 Subject: [PATCH 134/322] add missing license headers --- .../pipelining/HttpPipeliningHandler.java | 22 +++++++++++++++++++ .../OrderedDownstreamChannelEvent.java | 22 +++++++++++++++++++ .../OrderedUpstreamMessageEvent.java | 22 +++++++++++++++++++ 3 files changed, 66 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java index 10008c76a54..4bcbf4079c0 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java @@ -1,5 +1,27 @@ package org.elasticsearch.http.netty.pipelining; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.jboss.netty.channel.*; diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java index 6b713a08020..622a3e6ac9f 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java @@ -1,5 +1,27 @@ package org.elasticsearch.http.netty.pipelining; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + import org.jboss.netty.channel.*; /** diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java index 7343b29b6c5..cc47b5be320 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java @@ -1,5 +1,27 @@ package org.elasticsearch.http.netty.pipelining; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.UpstreamMessageEvent; From 266bece2fe7cfbaf66ef91cf635b464fbf403189 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 13:19:09 -0500 Subject: [PATCH 135/322] add missing license headers --- .../profile/InternalProfileShardResults.java | 19 +++++++++++++++++++ .../master/IndexingMasterFailoverIT.java | 19 +++++++++++++++++++ .../cluster/routing/PrimaryAllocationIT.java | 19 +++++++++++++++++++ .../allocation/ActiveAllocationIdTests.java | 19 +++++++++++++++++++ 4 files changed, 76 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java index 2ab3b632119..e6052ff5095 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java @@ -1,5 +1,24 @@ package org.elasticsearch.search.profile; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 6946e35861c..b0c13f851a6 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -1,5 +1,24 @@ package org.elasticsearch.action.support.master; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index dcd35303b75..340fdcc3c99 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -1,5 +1,24 @@ package org.elasticsearch.cluster.routing; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java index 7a7f4722e97..2e54512b95f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java @@ -1,5 +1,24 @@ package org.elasticsearch.cluster.routing.allocation; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; From 56e4752d28a42834ef7e3faccd85b5c4259296e7 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 18 Dec 2015 08:19:40 +0100 Subject: [PATCH 136/322] Align handling of interrupts in BulkProcessor With this commit we implement a cancellation policy in BulkProcessor which is aligned for the sync and the async case and also document it. Closes #14833. --- .../elasticsearch/action/bulk/BulkProcessor.java | 3 +++ .../action/bulk/BulkRequestHandler.java | 16 +++++++++++----- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index af5af80ac2f..43014cfb759 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -61,6 +61,9 @@ public class BulkProcessor implements Closeable { /** * Callback after a failed execution of bulk request. + * + * Note that in case an instance of InterruptedException is passed, which means that request processing has been + * cancelled externally, the thread's interruption status has been restored prior to calling this method. 
*/ void afterBulk(long executionId, BulkRequest request, Throwable failure); } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index ffc985bd510..dc98a16c578 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -74,11 +74,17 @@ abstract class BulkRequestHandler { .withSyncBackoff(client, bulkRequest); afterCalled = true; listener.afterBulk(executionId, bulkRequest, bulkResponse); - } catch (Exception e) { + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.info("Bulk request {} has been cancelled.", e, executionId); if (!afterCalled) { - logger.warn("Failed to executed bulk request {}.", e, executionId); listener.afterBulk(executionId, bulkRequest, e); } + } catch (Throwable t) { + logger.warn("Failed to execute bulk request {}.", t, executionId); + if (!afterCalled) { + listener.afterBulk(executionId, bulkRequest, t); + } } } @@ -135,11 +141,11 @@ abstract class BulkRequestHandler { }); bulkRequestSetupSuccessful = true; } catch (InterruptedException e) { - // This is intentionally wrong to avoid changing the behaviour implicitly with this PR. It will be fixed in #14833 - Thread.interrupted(); + Thread.currentThread().interrupt(); + logger.info("Bulk request {} has been cancelled.", e, executionId); listener.afterBulk(executionId, bulkRequest, e); } catch (Throwable t) { - logger.warn("Failed to executed bulk request {}.", t, executionId); + logger.warn("Failed to execute bulk request {}.", t, executionId); listener.afterBulk(executionId, bulkRequest, t); } finally { if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore From e51904fa002c6a8489c1bbfd9e325fa6e10e02bc Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 18 Dec 2015 07:52:33 +0100 Subject: [PATCH 137/322] Document usage of backoff policy in BulkProcessor With this commit we update the documentation to explain the new backoff feature in BulkProcessor. Relates to #14620. --- docs/java-api/docs/bulk.asciidoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/java-api/docs/bulk.asciidoc b/docs/java-api/docs/bulk.asciidoc index 6890f7c49d3..248326700c4 100644 --- a/docs/java-api/docs/bulk.asciidoc +++ b/docs/java-api/docs/bulk.asciidoc @@ -47,6 +47,7 @@ To use it, first create a `BulkProcessor` instance: [source,java] -------------------------------------------------- +import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkProcessor; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -73,6 +74,8 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) <6> .setFlushInterval(TimeValue.timeValueSeconds(5)) <7> .setConcurrentRequests(1) <8> + .setBackoffPolicy( + BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3)) <9> .build(); -------------------------------------------------- <1> Add your elasticsearch client @@ -86,6 +89,10 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( <7> We want to flush the bulk every 5 seconds whatever the number of requests <8> Set the number of concurrent requests. A value of 0 means that only a single request will be allowed to be executed. 
A value of 1 means 1 concurrent request is allowed to be executed while accumulating new bulk requests. +<9> Set a custom backoff policy which will initially wait for 100ms, increase exponentially and retries up to three + times. A retry is attempted whenever one or more bulk item requests have failed with an `EsRejectedExecutionException` + which indicates that there were too little compute resources available for processing the request. To disable backoff, + pass `BackoffPolicy.noBackoff()`. Then you can simply add your requests to the `BulkProcessor`: @@ -101,6 +108,7 @@ By default, `BulkProcessor`: * sets bulkSize to `5mb` * does not set flushInterval * sets concurrentRequests to 1 +* sets backoffPolicy to an exponential backoff with 8 retries and a start delay of 50ms. The total wait time is roughly 5.1 seconds. When all documents are loaded to the `BulkProcessor` it can be closed by using `awaitClose` or `close` methods: From 2ce54640f58942d6c78aa25fc9eebc4f5afbd85a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 13:46:59 -0500 Subject: [PATCH 138/322] Remove unnecessary license categories/matchers --- .../precommit/LicenseHeadersTask.groovy | 35 +------------------ 1 file changed, 1 insertion(+), 34 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index e680a546f5b..580c089461a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -66,37 +66,12 @@ public class LicenseHeadersTask extends DefaultTask { } // BSD 4-clause stuff (is disallowed below) + // we keep this here, in case someone adds BSD code for some reason, it should never be allowed. 
substringMatcher(licenseFamilyCategory: "BSD4 ", licenseFamilyName: "Original BSD License (with advertising clause)") { pattern(substring: "All advertising materials") } - // BSD-like stuff - substringMatcher(licenseFamilyCategory: "BSD ", - licenseFamilyName: "Modified BSD License") { - // brics automaton - pattern(substring: "Copyright (c) 2001-2009 Anders Moeller") - // snowball - pattern(substring: "Copyright (c) 2001, Dr Martin Porter") - // UMASS kstem - pattern(substring: "THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS") - // Egothor - pattern(substring: "Egothor Software License version 1.00") - // JaSpell - pattern(substring: "Copyright (c) 2005 Bruno Martins") - // d3.js - pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS") - // highlight.js - pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS") - } - - // MIT-like - substringMatcher(licenseFamilyCategory: "MIT ", - licenseFamilyName: "The MIT License") { - // ICU license - pattern(substring: "Permission is hereby granted, free of charge, to any person obtaining a copy") - } - // Apache substringMatcher(licenseFamilyCategory: "AL ", licenseFamilyName: "Apache") { @@ -111,20 +86,12 @@ public class LicenseHeadersTask extends DefaultTask { // Generated resources substringMatcher(licenseFamilyCategory: "GEN ", licenseFamilyName: "Generated") { - // svg files generated by gnuplot - pattern(substring: "Produced by GNUPLOT") - // snowball stemmers generated by snowball compiler - pattern(substring: "This file was generated automatically by the Snowball to Java compiler") - // uima tests generated by JCasGen - pattern(substring: "First created by JCasGen") // parsers generated by antlr pattern(substring: "ANTLR GENERATED CODE") } // approved categories approvedLicense(familyName: "Apache") - approvedLicense(familyName: "The MIT License") - approvedLicense(familyName: "Modified BSD License") approvedLicense(familyName: "Generated") } From 9f1dfdbaeaf7bdcd7aa4bb0679d5bb4d8dd096fb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 10:59:07 -0800 Subject: [PATCH 139/322] Build: Add AntTask to simplify controlling logging when running ant from gradle This new task allows setting code, similar to a doLast or doFirst, except it is specifically geared at running ant (and thus called doAnt). It adjusts the ant logging while running the ant so that the log level/behavior can be tweaked, and automatically buffers based on gradle logging level, and dumps the ant output upon failure. --- build.gradle | 11 -- buildSrc/build.gradle | 1 + .../org/elasticsearch/gradle/AntTask.groovy | 111 ++++++++++++++++++ .../precommit/LicenseHeadersTask.groovy | 31 +++-- .../precommit/ThirdPartyAuditTask.groovy | 73 ++++++------ 5 files changed, 166 insertions(+), 61 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy diff --git a/build.gradle b/build.gradle index 3d4067c1703..c31fe88f5d2 100644 --- a/build.gradle +++ b/build.gradle @@ -123,17 +123,6 @@ subprojects { } } } - // For reasons we don't fully understand yet, external dependencies are not picked up by Ant's optional tasks. - // But you can easily do it in another way. - // Only if your buildscript and Ant's optional task need the same library would you have to define it twice. 
- // https://docs.gradle.org/current/userguide/organizing_build_logic.html - configurations { - buildTools - } - dependencies { - buildTools 'de.thetaphi:forbiddenapis:2.0' - buildTools 'org.apache.rat:apache-rat:0.11' - } } // Ensure similar tasks in dependent projects run first. The projectsEvaluated here is diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index e46f9cb33c0..a0f06343d30 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -63,6 +63,7 @@ dependencies { compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... compile 'de.thetaphi:forbiddenapis:2.0' compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' + compile 'org.apache.rat:apache-rat:0.11' } processResources { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy new file mode 100644 index 00000000000..1b904150077 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle + +import org.apache.tools.ant.BuildException +import org.apache.tools.ant.BuildListener +import org.apache.tools.ant.BuildLogger +import org.apache.tools.ant.DefaultLogger +import org.apache.tools.ant.Project +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.Optional +import org.gradle.api.tasks.TaskAction + +import java.nio.charset.Charset + +/** + * A task which will run ant commands. + * + * Logging for the task is customizable for subclasses by overriding makeLogger. + */ +public class AntTask extends DefaultTask { + + /** + * A buffer that will contain the output of the ant code run, + * if the output was not already written directly to stdout. + */ + public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() + + @TaskAction + final void executeTask() { + // capture the current loggers + List savedLoggers = new ArrayList<>(); + for (BuildListener l : project.ant.project.getBuildListeners()) { + if (l instanceof BuildLogger) { + savedLoggers.add(l); + } + } + // remove them + for (BuildLogger l : savedLoggers) { + project.ant.project.removeBuildListener(l) + } + + final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO + final PrintStream stream = useStdout() ? 
System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) + BuildLogger antLogger = makeLogger(stream, outputLevel) + + // now run the command with just our logger + project.ant.project.addBuildListener(antLogger) + try { + runAnt(project.ant) + } catch (BuildException e) { + // ant failed, so see if we have buffered output to emit, then rethrow the failure + String buffer = outputBuffer.toString() + if (buffer.isEmpty() == false) { + logger.error("=== Ant output ===\n${buffer}") + } + throw e + } finally { + project.ant.project.removeBuildListener(antLogger) + // add back the old loggers before returning + for (BuildLogger l : savedLoggers) { + project.ant.project.addBuildListener(l) + } + } + } + + /** Runs the doAnt closure. This can be overridden by subclasses instead of having to set a closure. */ + protected void runAnt(AntBuilder ant) { + if (doAnt == null) { + throw new GradleException("Missing doAnt for ${name}") + } + doAnt(ant) + } + + /** Create the logger the ant runner will use, with the given stream for error/output. */ + protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { + return new DefaultLogger( + errorPrintStream: stream, + outputPrintStream: stream, + messageOutputLevel: outputLevel) + } + + /** + * Returns true if the ant logger should write to stdout, or false if to the buffer. + * The default implementation writes to the buffer when gradle info logging is disabled. + */ + protected boolean useStdout() { + return logger.isInfoEnabled() + } + + +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 580c089461a..de36af886b8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -18,34 +18,33 @@ */ package org.elasticsearch.gradle.precommit -import java.nio.file.Files - -import org.gradle.api.DefaultTask +import org.apache.rat.anttasks.Report +import org.apache.rat.anttasks.SubstringLicenseMatcher +import org.apache.rat.license.SimpleLicenseFamily +import org.elasticsearch.gradle.AntTask import org.gradle.api.tasks.SourceSet -import org.gradle.api.tasks.TaskAction -import groovy.xml.NamespaceBuilder -import groovy.xml.NamespaceBuilderSupport +import java.nio.file.Files /** * Checks files for license headers. *

* This is a port of the apache lucene check */ -public class LicenseHeadersTask extends DefaultTask { +public class LicenseHeadersTask extends AntTask { LicenseHeadersTask() { description = "Checks sources for missing, incorrect, or unacceptable license headers" + + if (ant.project.taskDefinitions.contains('ratReport') == false) { + ant.project.addTaskDefinition('ratReport', Report) + ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher) + ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily) + } } - @TaskAction - public void check() { - // load rat tasks - AntBuilder ant = new AntBuilder() - ant.typedef(resource: "org/apache/rat/anttasks/antlib.xml", - uri: "antlib:org.apache.rat.anttasks", - classpath: project.configurations.buildTools.asPath) - NamespaceBuilderSupport rat = NamespaceBuilder.newInstance(ant, "antlib:org.apache.rat.anttasks") + @Override + protected void runAnt(AntBuilder ant) { // create a file for the log to go to under reports/ File reportDir = new File(project.buildDir, "reports/licenseHeaders") @@ -54,7 +53,7 @@ public class LicenseHeadersTask extends DefaultTask { Files.deleteIfExists(reportFile.toPath()) // run rat, going to the file - rat.report(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) { + ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) { // checks all the java sources (allJava) for (SourceSet set : project.sourceSets) { for (File dir : set.allJava.srcDirs) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 45ab2300449..9ff348f20bb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -18,6 +18,10 @@ */ package org.elasticsearch.gradle.precommit +import org.apache.tools.ant.DefaultLogger +import org.elasticsearch.gradle.AntTask +import org.gradle.api.artifacts.Configuration + import java.nio.file.Files import java.nio.file.FileVisitResult import java.nio.file.Path @@ -35,7 +39,7 @@ import org.apache.tools.ant.Project /** * Basic static checking to keep tabs on third party JARs */ -public class ThirdPartyAuditTask extends DefaultTask { +public class ThirdPartyAuditTask extends AntTask { // true to be lenient about MISSING CLASSES private boolean missingClasses; @@ -46,6 +50,10 @@ public class ThirdPartyAuditTask extends DefaultTask { ThirdPartyAuditTask() { dependsOn(project.configurations.testCompile) description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'" + + if (ant.project.taskDefinitions.contains('thirdPartyAudit') == false) { + ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask) + } } /** @@ -84,38 +92,35 @@ public class ThirdPartyAuditTask extends DefaultTask { return excludes; } - @TaskAction - public void check() { - AntBuilder ant = new AntBuilder() + @Override + protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { + return new DefaultLogger( + errorPrintStream: stream, + outputPrintStream: stream, + // ignore passed in outputLevel for now, until we are filtering warning messages + messageOutputLevel: Project.MSG_ERR) + } - // we are noisy for many reasons, working around performance problems with forbidden-apis, dealing - // with warnings about missing 
classes, etc. so we use our own "quiet" AntBuilder - ant.project.buildListeners.each { listener -> - if (listener instanceof BuildLogger) { - listener.messageOutputLevel = Project.MSG_ERR; - } - }; - + @Override + protected void runAnt(AntBuilder ant) { // we only want third party dependencies. - FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> + FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> dependency.group.startsWith("org.elasticsearch") == false }) - + // we don't want provided dependencies, which we have already scanned. e.g. don't // scan ES core's dependencies for every single plugin - try { - jars -= project.configurations.getByName("provided") - } catch (UnknownConfigurationException ignored) {} - + Configuration provided = project.configurations.findByName('provided') + if (provided != null) { + jars -= provided + } + // no dependencies matched, we are done if (jars.isEmpty()) { return; } - - ant.taskdef(name: "thirdPartyAudit", - classname: "de.thetaphi.forbiddenapis.ant.AntTask", - classpath: project.configurations.buildTools.asPath) - + + // print which jars we are going to scan, always // this is not the time to try to be succinct! Forbidden will print plenty on its own! Set names = new HashSet<>() @@ -123,26 +128,26 @@ public class ThirdPartyAuditTask extends DefaultTask { names.add(jar.getName()) } logger.error("[thirdPartyAudit] Scanning: " + names) - + // warn that classes are missing // TODO: move these to excludes list! if (missingClasses) { logger.warn("[thirdPartyAudit] WARNING: CLASSES ARE MISSING! Expect NoClassDefFoundError in bug reports from users!") } - - // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, + + // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, // and then remove our temp dir afterwards. don't complain: try it yourself. // we don't use gradle temp dir handling, just google it, or try it yourself. - + File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit') - + // clean up any previous mess (if we failed), then unzip everything to one directory ant.delete(dir: tmpDir.getAbsolutePath()) tmpDir.mkdirs() for (File jar : jars) { ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()) } - + // convert exclusion class names to binary file names String[] excludedFiles = new String[excludes.length]; for (int i = 0; i < excludes.length; i++) { @@ -152,12 +157,12 @@ public class ThirdPartyAuditTask extends DefaultTask { throw new IllegalStateException("bogus thirdPartyAudit exclusion: '" + excludes[i] + "', not found in any dependency") } } - + // jarHellReprise checkSheistyClasses(tmpDir.toPath(), new HashSet<>(Arrays.asList(excludedFiles))); - - ant.thirdPartyAudit(internalRuntimeForbidden: true, - failOnUnsupportedJava: false, + + ant.thirdPartyAudit(internalRuntimeForbidden: true, + failOnUnsupportedJava: false, failOnMissingClasses: !missingClasses, classpath: project.configurations.testCompile.asPath) { fileset(dir: tmpDir, excludes: excludedFiles.join(',')) @@ -169,7 +174,7 @@ public class ThirdPartyAuditTask extends DefaultTask { /** * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk! */ - private void checkSheistyClasses(Path root, Set excluded) { + protected void checkSheistyClasses(Path root, Set excluded) { // system.parent = extensions loader. // note: for jigsaw, this evilness will need modifications (e.g. 
use jrt filesystem!). // but groovy/gradle needs to work at all first! From 5b9bf8e73862461d9a1c9fcdf9fd8e7f4af7bf3b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 12:01:54 -0800 Subject: [PATCH 140/322] Make a new ant builder per AntTask invocation --- .../org/elasticsearch/gradle/AntTask.groovy | 28 ++++++++----------- .../precommit/LicenseHeadersTask.groovy | 9 ++---- .../precommit/ThirdPartyAuditTask.groovy | 19 ++++--------- 3 files changed, 20 insertions(+), 36 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 1b904150077..7cd9b5b91a7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -47,26 +47,26 @@ public class AntTask extends DefaultTask { @TaskAction final void executeTask() { - // capture the current loggers - List savedLoggers = new ArrayList<>(); - for (BuildListener l : project.ant.project.getBuildListeners()) { + AntBuilder ant = new AntBuilder() + + // remove existing loggers, we add our own + List toRemove = new ArrayList<>(); + for (BuildListener l : ant.project.getBuildListeners()) { if (l instanceof BuildLogger) { - savedLoggers.add(l); + toRemove.add(l); } } - // remove them - for (BuildLogger l : savedLoggers) { - project.ant.project.removeBuildListener(l) + for (BuildLogger l : toRemove) { + ant.project.removeBuildListener(l) } - final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO + final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : (logger.isInfoEnabled() ? Project.MSG_INFO : Project.MSG_WARN) final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) BuildLogger antLogger = makeLogger(stream, outputLevel) - // now run the command with just our logger - project.ant.project.addBuildListener(antLogger) + ant.project.addBuildListener(antLogger) try { - runAnt(project.ant) + runAnt(ant) } catch (BuildException e) { // ant failed, so see if we have buffered output to emit, then rethrow the failure String buffer = outputBuffer.toString() @@ -74,12 +74,6 @@ public class AntTask extends DefaultTask { logger.error("=== Ant output ===\n${buffer}") } throw e - } finally { - project.ant.project.removeBuildListener(antLogger) - // add back the old loggers before returning - for (BuildLogger l : savedLoggers) { - project.ant.project.addBuildListener(l) - } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index de36af886b8..39cf55c905b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -35,16 +35,13 @@ public class LicenseHeadersTask extends AntTask { LicenseHeadersTask() { description = "Checks sources for missing, incorrect, or unacceptable license headers" - - if (ant.project.taskDefinitions.contains('ratReport') == false) { - ant.project.addTaskDefinition('ratReport', Report) - ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher) - ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily) - } } @Override protected void runAnt(AntBuilder ant) { + ant.project.addTaskDefinition('ratReport', Report) + 
ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher) + ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily) // create a file for the log to go to under reports/ File reportDir = new File(project.buildDir, "reports/licenseHeaders") diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 9ff348f20bb..69db1e773e8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -18,24 +18,19 @@ */ package org.elasticsearch.gradle.precommit +import org.apache.tools.ant.BuildLogger import org.apache.tools.ant.DefaultLogger +import org.apache.tools.ant.Project import org.elasticsearch.gradle.AntTask import org.gradle.api.artifacts.Configuration +import org.gradle.api.file.FileCollection -import java.nio.file.Files import java.nio.file.FileVisitResult +import java.nio.file.Files import java.nio.file.Path import java.nio.file.SimpleFileVisitor import java.nio.file.attribute.BasicFileAttributes -import org.gradle.api.DefaultTask -import org.gradle.api.artifacts.UnknownConfigurationException -import org.gradle.api.file.FileCollection -import org.gradle.api.tasks.TaskAction - -import org.apache.tools.ant.BuildLogger -import org.apache.tools.ant.Project - /** * Basic static checking to keep tabs on third party JARs */ @@ -50,10 +45,6 @@ public class ThirdPartyAuditTask extends AntTask { ThirdPartyAuditTask() { dependsOn(project.configurations.testCompile) description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'" - - if (ant.project.taskDefinitions.contains('thirdPartyAudit') == false) { - ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask) - } } /** @@ -103,6 +94,8 @@ public class ThirdPartyAuditTask extends AntTask { @Override protected void runAnt(AntBuilder ant) { + ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask) + // we only want third party dependencies. 
FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> dependency.group.startsWith("org.elasticsearch") == false From 44edac0081edf8c889e9d7cb2ef50785056bac20 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 12:07:30 -0800 Subject: [PATCH 141/322] use better variable name for build listener, and change access back to private for internal method of thirdPartyAudit --- .../main/groovy/org/elasticsearch/gradle/AntTask.groovy | 8 ++++---- .../gradle/precommit/ThirdPartyAuditTask.groovy | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 7cd9b5b91a7..b713c00ed8d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -51,13 +51,13 @@ public class AntTask extends DefaultTask { // remove existing loggers, we add our own List toRemove = new ArrayList<>(); - for (BuildListener l : ant.project.getBuildListeners()) { + for (BuildListener listener : ant.project.getBuildListeners()) { if (l instanceof BuildLogger) { - toRemove.add(l); + toRemove.add(listener); } } - for (BuildLogger l : toRemove) { - ant.project.removeBuildListener(l) + for (BuildLogger listener : toRemove) { + ant.project.removeBuildListener(listener) } final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : (logger.isInfoEnabled() ? Project.MSG_INFO : Project.MSG_WARN) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 69db1e773e8..2ee4c29d614 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -167,7 +167,7 @@ public class ThirdPartyAuditTask extends AntTask { /** * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk! */ - protected void checkSheistyClasses(Path root, Set excluded) { + private void checkSheistyClasses(Path root, Set excluded) { // system.parent = extensions loader. // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!). // but groovy/gradle needs to work at all first! 
From 41139c6b15bac39f8decfb2a5cb90462e5f9a3f5 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 18 Dec 2015 14:59:16 -0500 Subject: [PATCH 142/322] Ensure profile results are immutable after serialization --- .../search/profile/InternalProfileShardResults.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java index e6052ff5095..e55a27be86e 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java @@ -62,8 +62,9 @@ public final class InternalProfileShardResults implements Writeable> getShardResults() { From 4700cb94097945d3a0087c6746746d662ea27f3e Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 18 Dec 2015 15:16:51 -0500 Subject: [PATCH 143/322] [TEST] Check length of profiled results in tests --- .../elasticsearch/search/profile/QueryProfilerIT.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java index bb33364a751..22772ff4a15 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -73,6 +73,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shard : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shard.getValue()) { for (ProfileResult result : searchProfiles.getQueryResults()) { @@ -195,6 +196,7 @@ public class QueryProfilerIT extends ESIntegTestCase { Map> p = resp.getProfileResults(); assertNotNull(p); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -240,6 +242,7 @@ public class QueryProfilerIT extends ESIntegTestCase { Map> p = resp.getProfileResults(); assertNotNull(p); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -307,6 +310,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -358,6 +362,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : 
shardResult.getValue()) { @@ -404,6 +409,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -450,6 +456,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -495,6 +502,7 @@ public class QueryProfilerIT extends ESIntegTestCase { .execute().actionGet(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { @@ -549,6 +557,7 @@ public class QueryProfilerIT extends ESIntegTestCase { } assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); for (Map.Entry> shardResult : resp.getProfileResults().entrySet()) { for (ProfileShardResult searchProfiles : shardResult.getValue()) { From 4ea19995cff603ce8472e985c902dd2f8fadebee Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 12:43:47 -0800 Subject: [PATCH 144/322] Remove wildcard imports --- .../junit4/RandomizedTestingTask.groovy | 6 +- .../gradle/junit4/TestProgressLogger.groovy | 3 +- .../gradle/junit4/TestReportLogger.groovy | 7 +- .../org/elasticsearch/gradle/AntTask.groovy | 8 +- .../elasticsearch/gradle/BuildPlugin.groovy | 9 +- .../elasticsearch/gradle/EmptyDirTask.groovy | 5 +- .../gradle/FileContentsTask.groovy | 5 +- .../gradle/plugin/PluginBuildPlugin.groovy | 1 - .../plugin/PluginPropertiesExtension.groovy | 1 - .../precommit/DependencyLicensesTask.groovy | 4 +- .../gradle/test/ClusterFormationTasks.groovy | 4 +- .../gradle/test/RestIntegTestTask.groovy | 2 - .../elasticsearch/gradle/test/RunTask.groovy | 1 - .../test/StandaloneTestBasePlugin.groovy | 1 - .../gradle/vagrant/BatsOverVagrantTask.groovy | 3 +- .../gradle/vagrant/VagrantCommandTask.groovy | 3 +- core/build.gradle | 2 +- .../lucene/queries/BlendedTermQuery.java | 14 +- .../classic/MapperQueryParser.java | 16 +- .../vectorhighlight/CustomFieldQuery.java | 12 +- .../elasticsearch/ElasticsearchException.java | 8 +- .../elasticsearch/action/ActionFuture.java | 2 - .../elasticsearch/action/ActionModule.java | 8 +- .../action/DocWriteResponse.java | 1 - .../action/NoSuchNodeException.java | 1 - .../action/TransportActionNodeProxy.java | 3 +- .../action/UnavailableShardsException.java | 3 +- .../cluster/health/ClusterHealthResponse.java | 2 +- .../health/TransportClusterHealthAction.java | 6 +- .../NodesHotThreadsRequestBuilder.java | 2 - .../cluster/node/info/NodesInfoResponse.java | 1 - .../liveness/TransportLivenessAction.java | 4 +- .../admin/cluster/node/stats/NodeStats.java | 3 +- .../verify/VerifyRepositoryResponse.java | 7 +- 
.../reroute/ClusterRerouteResponse.java | 1 - .../TransportClusterUpdateSettingsAction.java | 2 - .../shards/ClusterSearchShardsResponse.java | 1 - .../delete/DeleteSnapshotRequestBuilder.java | 1 - .../TransportSnapshotsStatusAction.java | 1 - .../state/TransportClusterStateAction.java | 1 - .../stats/ClusterStatsNodeResponse.java | 4 +- .../cluster/stats/ClusterStatsNodes.java | 2 +- .../cluster/stats/ClusterStatsResponse.java | 4 +- .../stats/TransportClusterStatsAction.java | 2 +- .../analyze/TransportAnalyzeAction.java | 16 +- .../indices/create/CreateIndexRequest.java | 2 +- .../forcemerge/ForceMergeResponse.java | 3 - .../admin/indices/get/GetIndexResponse.java | 1 - .../put/TransportUpdateSettingsAction.java | 2 - .../shards/IndicesShardStoresRequest.java | 2 +- .../shards/IndicesShardStoresResponse.java | 2 +- .../TransportIndicesShardStoresAction.java | 4 +- .../indices/stats/IndicesStatsResponse.java | 1 - .../admin/indices/stats/ShardStats.java | 1 - .../warmer/get/GetWarmersResponse.java | 1 - .../warmer/put/TransportPutWarmerAction.java | 2 - .../action/bulk/BulkItemRequest.java | 1 - .../action/bulk/BulkProcessor.java | 5 +- .../org/elasticsearch/action/bulk/Retry.java | 2 +- .../action/explain/ExplainResponse.java | 1 - .../TransportFieldStatsTransportAction.java | 9 +- .../action/get/MultiGetRequest.java | 14 +- .../action/get/TransportMultiGetAction.java | 1 - .../action/index/IndexRequest.java | 12 +- .../get/GetIndexedScriptResponse.java | 1 - .../put/PutIndexedScriptRequest.java | 2 +- .../action/percolate/PercolateResponse.java | 6 +- .../percolate/PercolateSourceBuilder.java | 1 - .../TransportMultiPercolateAction.java | 13 +- .../search/SearchPhaseExecutionException.java | 4 +- .../action/search/ShardSearchFailure.java | 3 - .../action/search/TransportSearchAction.java | 8 +- .../search/TransportSearchScrollAction.java | 3 +- ...ransportSearchDfsQueryThenFetchAction.java | 1 - .../TransportSearchQueryThenFetchAction.java | 1 - ...sportSearchScrollQueryThenFetchAction.java | 1 - .../type/TransportSearchTypeAction.java | 1 - .../action/support/TransportAction.java | 6 +- .../support/broadcast/BroadcastRequest.java | 2 - .../broadcast/TransportBroadcastAction.java | 6 +- .../support/nodes/TransportNodesAction.java | 8 +- .../TransportReplicationAction.java | 16 +- .../InstanceShardOperationRequestBuilder.java | 1 - ...ransportInstanceSingleOperationAction.java | 11 +- .../shard/TransportSingleShardAction.java | 6 +- .../termvectors/MultiTermVectorsAction.java | 1 - .../termvectors/MultiTermVectorsRequest.java | 15 +- .../action/termvectors/TermVectorsFields.java | 7 +- .../action/termvectors/TermVectorsFilter.java | 14 +- .../TermVectorsRequestBuilder.java | 1 - .../action/termvectors/TermVectorsWriter.java | 8 +- .../elasticsearch/bootstrap/JNACLibrary.java | 9 +- .../bootstrap/JNAKernel32Library.java | 7 +- .../elasticsearch/bootstrap/JNANatives.java | 9 +- .../org/elasticsearch/bootstrap/Seccomp.java | 53 +- .../org/elasticsearch/bootstrap/Security.java | 23 +- .../cache/recycler/PageCacheRecycler.java | 5 +- .../java/org/elasticsearch/client/Client.java | 35 +- .../client/ClusterAdminClient.java | 3 +- .../client/ElasticsearchClient.java | 7 +- .../elasticsearch/client/FilterClient.java | 6 +- .../client/IndicesAdminClient.java | 13 +- .../org/elasticsearch/client/Requests.java | 4 +- .../elasticsearch/client/node/NodeClient.java | 7 +- .../client/support/AbstractClient.java | 73 ++- .../cluster/AbstractDiffable.java | 2 +- 
.../elasticsearch/cluster/ClusterModule.java | 6 +- .../elasticsearch/cluster/ClusterState.java | 13 +- .../org/elasticsearch/cluster/Diffable.java | 2 +- .../elasticsearch/cluster/DiffableUtils.java | 1 - ...ompatibleClusterStateVersionException.java | 1 - .../cluster/RestoreInProgress.java | 1 - .../cluster/SnapshotsInProgress.java | 1 - .../action/index/NodeIndexDeletedAction.java | 9 +- .../index/NodeMappingRefreshAction.java | 7 +- .../cluster/health/ClusterStateHealth.java | 6 +- .../cluster/metadata/IndexMetaData.java | 10 +- .../cluster/metadata/MetaData.java | 1 - .../metadata/MetaDataCreateIndexService.java | 10 +- .../metadata/MetaDataIndexAliasesService.java | 6 +- .../metadata/MetaDataIndexStateService.java | 5 - .../MetaDataIndexTemplateService.java | 8 +- .../metadata/MetaDataIndexUpgradeService.java | 1 - .../metadata/MetaDataMappingService.java | 14 +- .../MetaDataUpdateSettingsService.java | 14 +- .../cluster/node/DiscoveryNode.java | 1 - .../cluster/node/DiscoveryNodes.java | 1 - .../cluster/routing/GroupShardsIterator.java | 7 +- .../cluster/routing/IndexRoutingTable.java | 8 +- .../cluster/routing/RoutingNode.java | 17 +- .../cluster/routing/RoutingService.java | 6 +- .../cluster/routing/RoutingTable.java | 1 - .../routing/allocation/AllocationService.java | 7 +- .../allocator/BalancedShardsAllocator.java | 3 +- .../allocation/command/AllocationCommand.java | 9 +- .../command/MoveAllocationCommand.java | 2 +- .../decider/AwarenessAllocationDecider.java | 2 +- .../decider/DiskThresholdDecider.java | 2 +- .../SnapshotInProgressAllocationDecider.java | 2 +- .../decider/ThrottlingAllocationDecider.java | 1 - .../cluster/settings/DynamicSettings.java | 1 - .../DelegatingHasContextAndHeaders.java | 1 - .../org/elasticsearch/common/Nullable.java | 8 +- .../org/elasticsearch/common/PidFile.java | 2 +- .../common/SuppressForbidden.java | 2 +- .../common/bytes/BytesArray.java | 2 +- .../bytes/ChannelBufferBytesReference.java | 2 +- .../bytes/ReleasablePagedBytesReference.java | 1 - .../org/elasticsearch/common/cache/Cache.java | 6 +- .../elasticsearch/common/cli/Terminal.java | 6 +- .../common/collect/CopyOnWriteHashMap.java | 12 +- .../common/collect/ImmutableOpenIntMap.java | 8 +- .../common/collect/ImmutableOpenMap.java | 11 +- .../common/component/LifecycleComponent.java | 3 - .../elasticsearch/common/geo/GeoDistance.java | 1 + .../elasticsearch/common/geo/GeoUtils.java | 73 ++- .../common/geo/XShapeCollection.java | 2 - .../common/geo/builders/CircleBuilder.java | 1 - .../common/geo/builders/EnvelopeBuilder.java | 2 +- .../builders/GeometryCollectionBuilder.java | 3 +- .../geo/builders/LineStringBuilder.java | 17 +- .../geo/builders/MultiLineStringBuilder.java | 7 +- .../geo/builders/MultiPointBuilder.java | 2 +- .../geo/builders/MultiPolygonBuilder.java | 15 +- .../common/geo/builders/PointBuilder.java | 9 +- .../common/geo/builders/PointCollection.java | 7 +- .../common/geo/builders/PolygonBuilder.java | 3 +- .../common/geo/builders/ShapeBuilder.java | 7 +- .../http/client/HttpDownloadHelper.java | 6 +- .../common/inject/AbstractProcessor.java | 12 +- .../common/inject/ConstantFactory.java | 6 +- .../DefaultConstructionProxyFactory.java | 1 - .../common/inject/ExposedKeyFactory.java | 6 +- .../common/inject/FactoryProxy.java | 6 +- .../elasticsearch/common/inject/Inject.java | 4 +- .../InternalFactoryToProviderAdapter.java | 6 +- .../common/inject/TypeLiteral.java | 10 +- .../inject/assistedinject/Assisted.java | 4 +- .../inject/assistedinject/Parameter.java | 6 +- 
.../common/inject/internal/Nullable.java | 6 +- .../common/inject/spi/Elements.java | 30 +- .../common/io/FileSystemUtils.java | 12 +- .../org/elasticsearch/common/io/Streams.java | 3 +- .../common/joda/FormatDateTimeFormatter.java | 10 +- .../org/elasticsearch/common/joda/Joda.java | 18 +- .../elasticsearch/common/lucene/Lucene.java | 39 +- .../common/lucene/MinimumScoreCollector.java | 8 +- .../common/lucene/ShardCoreKeyMap.java | 8 +- .../index/ElasticsearchDirectoryReader.java | 5 +- .../lucene/index/ElasticsearchLeafReader.java | 3 +- .../common/lucene/search/EmptyScorer.java | 1 - .../lucene/search/MoreLikeThisQuery.java | 12 +- .../lucene/search/MultiPhrasePrefixQuery.java | 12 +- .../common/lucene/search/Queries.java | 8 +- .../common/lucene/search/XMoreLikeThis.java | 25 +- .../function/FiltersFunctionScoreQuery.java | 7 +- .../search/function/FunctionScoreQuery.java | 8 +- .../uid/PerThreadIDAndVersionLookup.java | 4 +- .../common/lucene/uid/Versions.java | 1 - .../elasticsearch/common/metrics/EWMA.java | 5 +- .../common/metrics/MeterMetric.java | 2 +- .../common/netty/OpenChannelsHandler.java | 10 +- .../settings/AbstractScopedSettings.java | 6 +- .../common/settings/ClusterSettings.java | 16 +- .../common/settings/Setting.java | 4 +- .../org/elasticsearch/common/text/Text.java | 3 +- .../transport/LocalTransportAddress.java | 2 - .../common/transport/TransportAddress.java | 1 - .../common/unit/MemorySizeValue.java | 4 +- .../elasticsearch/common/unit/TimeValue.java | 1 - .../common/util/BytesRefHash.java | 1 - .../common/util/CancellableThreads.java | 1 - .../common/util/CollectionUtils.java | 17 +- .../common/util/ExtensionPoint.java | 6 +- .../common/util/LongObjectPagedHashMap.java | 1 - .../common/util/MultiDataPathUpgrader.java | 14 +- .../common/util/SingleObjectCache.java | 1 - .../common/util/concurrent/BaseFuture.java | 6 +- .../common/util/concurrent/EsExecutors.java | 7 +- .../util/concurrent/PrioritizedRunnable.java | 3 +- .../common/util/iterable/Iterables.java | 7 +- .../common/xcontent/ObjectParser.java | 9 +- .../common/xcontent/XContent.java | 5 +- .../common/xcontent/XContentBuilder.java | 3 +- .../common/xcontent/XContentFactory.java | 1 - .../common/xcontent/XContentHelper.java | 2 +- .../common/xcontent/cbor/CborXContent.java | 11 +- .../common/xcontent/json/JsonXContent.java | 11 +- .../xcontent/json/JsonXContentGenerator.java | 8 +- .../xcontent/json/JsonXContentParser.java | 1 - .../common/xcontent/smile/SmileXContent.java | 11 +- .../support/AbstractXContentParser.java | 26 +- .../common/xcontent/yaml/YamlXContent.java | 11 +- .../discovery/local/LocalDiscovery.java | 16 +- .../discovery/zen/NodeJoinController.java | 6 +- .../discovery/zen/ZenDiscovery.java | 20 +- .../zen/fd/MasterFaultDetection.java | 12 +- .../discovery/zen/fd/NodesFaultDetection.java | 10 +- .../zen/membership/MembershipAction.java | 7 +- .../discovery/zen/ping/ZenPing.java | 1 - .../publish/PublishClusterStateAction.java | 22 +- .../java/org/elasticsearch/env/ShardLock.java | 3 - .../gateway/AsyncShardFetch.java | 1 - .../org/elasticsearch/gateway/Gateway.java | 6 +- .../gateway/GatewayAllocator.java | 1 - .../elasticsearch/gateway/GatewayService.java | 6 +- .../gateway/MetaDataStateFormat.java | 18 +- .../gateway/PrimaryShardAllocator.java | 8 +- ...ransportNodesListGatewayStartedShards.java | 1 - .../http/netty/HttpRequestHandler.java | 6 +- .../http/netty/NettyHttpChannel.java | 28 +- .../http/netty/NettyHttpServerTransport.java | 36 +- .../pipelining/HttpPipeliningHandler.java 
| 13 +- .../OrderedDownstreamChannelEvent.java | 6 +- .../index/AbstractIndexComponent.java | 3 +- .../index/CompositeIndexEventListener.java | 2 +- .../org/elasticsearch/index/IndexModule.java | 6 +- .../index/NodeServicesProvider.java | 1 - .../index/analysis/Analysis.java | 9 +- .../index/analysis/AnalysisRegistry.java | 7 +- .../index/analysis/AnalysisService.java | 3 - .../DelimitedPayloadTokenFilterFactory.java | 16 +- .../index/analysis/FieldNameAnalyzer.java | 1 - .../analysis/HtmlStripCharFilterFactory.java | 2 - .../analysis/KeepTypesFilterFactory.java | 4 +- .../index/analysis/KeepWordFilterFactory.java | 2 +- .../KeywordMarkerTokenFilterFactory.java | 2 +- .../index/analysis/NumericLongTokenizer.java | 3 +- .../index/analysis/PatternAnalyzer.java | 4 +- ...PatternCaptureGroupTokenFilterFactory.java | 2 +- .../PatternReplaceTokenFilterFactory.java | 2 +- .../index/analysis/SnowballAnalyzer.java | 8 +- .../analysis/StandardAnalyzerProvider.java | 2 +- .../analysis/StandardHtmlStripAnalyzer.java | 2 +- .../StemmerOverrideTokenFilterFactory.java | 4 +- .../analysis/StemmerTokenFilterFactory.java | 35 +- .../analysis/SynonymTokenFilterFactory.java | 2 +- .../index/analysis/TokenizerFactory.java | 2 - .../analysis/TruncateTokenFilterFactory.java | 2 +- .../analysis/UniqueTokenFilterFactory.java | 2 +- .../WordDelimiterTokenFilterFactory.java | 21 +- ...tionaryCompoundWordTokenFilterFactory.java | 6 +- ...enationCompoundWordTokenFilterFactory.java | 6 +- .../index/cache/query/QueryCacheStats.java | 3 +- .../index/codec/CodecService.java | 1 + ...ElasticsearchConcurrentMergeScheduler.java | 8 +- .../elasticsearch/index/engine/Engine.java | 20 +- .../index/engine/EngineSearcher.java | 1 - .../index/engine/EngineSearcherFactory.java | 1 - .../index/engine/InternalEngine.java | 22 +- .../index/engine/RecoveryCounter.java | 1 - .../index/fielddata/FieldData.java | 7 +- .../index/fielddata/IndexFieldDataCache.java | 1 - .../fielddata/IndexFieldDataService.java | 2 +- .../fielddata/RamAccountingTermsEnum.java | 2 +- .../fielddata/SortingBinaryDocValues.java | 7 +- .../BytesRefFieldComparatorSource.java | 20 +- .../LongValuesComparatorSource.java | 1 - .../ordinals/GlobalOrdinalsBuilder.java | 4 +- .../fielddata/ordinals/OrdinalsBuilder.java | 8 +- .../AbstractGeoPointDVIndexFieldData.java | 6 +- .../plain/AbstractIndexFieldData.java | 6 +- .../plain/AbstractIndexGeoPointFieldData.java | 5 +- .../plain/AbstractIndexOrdinalsFieldData.java | 12 +- .../plain/BytesBinaryDVIndexFieldData.java | 2 +- .../plain/DisabledIndexFieldData.java | 7 +- .../plain/GeoPointArrayAtomicFieldData.java | 1 - .../fielddata/plain/IndexIndexFieldData.java | 6 +- .../plain/PagedBytesIndexFieldData.java | 14 +- .../plain/ParentChildIndexFieldData.java | 24 +- .../plain/SortedNumericDVIndexFieldData.java | 68 +- .../SortedSetDVOrdinalsIndexFieldData.java | 8 +- .../index/fieldvisitor/FieldsVisitor.java | 1 - .../fieldvisitor/SingleFieldsVisitor.java | 1 - .../index/mapper/DocumentFieldMappers.java | 1 - .../index/mapper/DocumentMapper.java | 1 - .../index/mapper/DocumentMapperParser.java | 4 +- .../index/mapper/FieldMapper.java | 1 - .../index/mapper/MappedFieldType.java | 9 +- .../index/mapper/MapperBuilders.java | 13 +- .../index/mapper/MapperService.java | 24 +- .../index/mapper/core/BinaryFieldMapper.java | 2 - .../index/mapper/core/ByteFieldMapper.java | 1 + .../mapper/core/CompletionFieldMapper.java | 15 +- .../index/mapper/core/IntegerFieldMapper.java | 1 + .../index/mapper/core/LongFieldMapper.java | 1 + 
.../index/mapper/core/NumberFieldMapper.java | 14 +- .../index/mapper/core/ShortFieldMapper.java | 1 + .../index/mapper/core/StringFieldMapper.java | 2 +- .../index/mapper/core/TypeParsers.java | 1 - .../mapper/geo/BaseGeoPointFieldMapper.java | 3 +- .../mapper/internal/TimestampFieldMapper.java | 2 +- .../mapper/internal/TypeFieldMapper.java | 2 - .../index/mapper/ip/IpFieldMapper.java | 1 - .../index/mapper/object/ObjectMapper.java | 15 +- .../index/mapper/object/RootObjectMapper.java | 13 +- .../elasticsearch/index/merge/MergeStats.java | 1 - .../index/query/BoostingQueryBuilder.java | 2 +- .../index/query/ExistsQueryBuilder.java | 6 +- .../query/FieldMaskingSpanQueryParser.java | 1 + .../index/query/IdsQueryBuilder.java | 7 +- .../index/query/MultiMatchQueryParser.java | 1 - .../index/query/QueryStringQueryBuilder.java | 7 +- .../elasticsearch/index/query/RegexpFlag.java | 4 +- .../index/query/ScriptQueryBuilder.java | 6 +- .../index/query/SimpleQueryParser.java | 9 +- .../index/query/TemplateQueryParser.java | 3 +- .../functionscore/DecayFunctionBuilder.java | 6 +- .../functionscore/DecayFunctionParser.java | 2 +- .../FunctionScoreQueryBuilder.java | 6 +- .../FunctionScoreQueryParser.java | 9 +- .../functionscore/ScoreFunctionParser.java | 2 +- .../FieldValueFactorFunctionParser.java | 2 +- .../random/RandomScoreFunctionParser.java | 2 +- .../script/ScriptScoreFunctionParser.java | 4 +- .../support/NestedInnerQueryParseSupport.java | 2 +- .../index/query/support/QueryInnerHits.java | 7 +- .../index/search/MatchQuery.java | 9 +- .../index/search/MultiMatchQuery.java | 6 +- .../geo/IndexedGeoBoundingBoxQuery.java | 1 - .../index/search/stats/SearchSlowLog.java | 2 - .../IllegalIndexShardStateException.java | 1 - .../index/shard/ShadowIndexShard.java | 4 +- .../index/shard/ShardStateMetaData.java | 1 - .../elasticsearch/index/shard/ShardUtils.java | 3 +- .../shard/TranslogRecoveryPerformer.java | 6 +- .../AbstractSimilarityProvider.java | 6 +- .../similarity/DFRSimilarityProvider.java | 2 - .../LMJelinekMercerSimilarityProvider.java | 2 - .../index/similarity/SimilarityProvider.java | 1 - .../snapshots/IndexShardSnapshotStatus.java | 3 - .../BlobStoreIndexShardRepository.java | 1 - .../index/store/DirectoryUtils.java | 3 +- .../index/store/FsDirectoryService.java | 12 +- .../org/elasticsearch/index/store/Store.java | 42 +- .../elasticsearch/index/store/StoreStats.java | 2 - .../index/store/VerifyingIndexOutput.java | 8 +- .../index/termvectors/TermVectorsService.java | 25 +- .../index/translog/ChannelReference.java | 2 +- .../index/translog/LegacyTranslogReader.java | 2 - .../index/translog/Translog.java | 12 +- .../index/translog/TranslogWriter.java | 5 +- .../elasticsearch/indices/IndicesService.java | 16 +- .../elasticsearch/indices/IndicesWarmer.java | 1 - .../indices/analysis/AnalysisModule.java | 10 +- .../indices/analysis/HunspellService.java | 6 +- .../indices/analysis/PreBuiltAnalyzers.java | 6 +- .../analysis/PreBuiltTokenFilters.java | 21 +- .../indices/analysis/PreBuiltTokenizers.java | 6 +- .../cache/request/IndicesRequestCache.java | 12 +- .../cluster/IndicesClusterStateService.java | 19 +- .../cache/IndicesFieldDataCacheListener.java | 2 - .../memory/IndexingMemoryController.java | 4 +- .../indices/recovery/RecoverySource.java | 7 +- .../recovery/RecoverySourceHandler.java | 8 +- .../indices/recovery/RecoveryTarget.java | 15 +- .../indices/store/IndicesStore.java | 16 +- .../TransportNodesListShardStoreMetaData.java | 8 +- .../indices/ttl/IndicesTTLService.java | 4 +- 
.../elasticsearch/monitor/jvm/HotThreads.java | 9 +- .../elasticsearch/monitor/jvm/JvmStats.java | 10 +- .../internal/InternalSettingsPreparer.java | 3 +- .../MultiDocumentPercolatorIndex.java | 7 +- .../percolator/PercolateContext.java | 2 - .../percolator/PercolatorService.java | 1 - .../percolator/QueryCollector.java | 10 +- .../org/elasticsearch/plugins/Plugin.java | 3 - .../elasticsearch/plugins/PluginManager.java | 26 +- .../elasticsearch/plugins/PluginsService.java | 1 - .../RepositoryMissingException.java | 1 - .../RepositoryVerificationException.java | 1 - .../VerifyNodeRepositoryAction.java | 1 - .../blobstore/BlobStoreRepository.java | 2 - .../blobstore/ChecksumBlobStoreFormat.java | 11 +- .../repositories/fs/FsRepository.java | 1 - .../elasticsearch/rest/BaseRestHandler.java | 6 +- .../elasticsearch/rest/BytesRestResponse.java | 2 - .../org/elasticsearch/rest/RestFilter.java | 2 - .../health/RestClusterHealthAction.java | 7 +- .../hotthreads/RestNodesHotThreadsAction.java | 8 +- .../node/stats/RestNodesStatsAction.java | 5 +- .../delete/RestDeleteRepositoryAction.java | 5 +- .../get/RestGetRepositoriesAction.java | 7 +- .../put/RestPutRepositoryAction.java | 5 +- .../verify/RestVerifyRepositoryAction.java | 5 +- .../reroute/RestClusterRerouteAction.java | 5 +- .../RestClusterGetSettingsAction.java | 8 +- .../RestClusterUpdateSettingsAction.java | 5 +- .../shards/RestClusterSearchShardsAction.java | 5 +- .../create/RestCreateSnapshotAction.java | 5 +- .../delete/RestDeleteSnapshotAction.java | 5 +- .../snapshots/get/RestGetSnapshotsAction.java | 5 +- .../restore/RestRestoreSnapshotAction.java | 5 +- .../status/RestSnapshotsStatusAction.java | 5 +- .../cluster/state/RestClusterStateAction.java | 8 +- .../cluster/stats/RestClusterStatsAction.java | 5 +- .../tasks/RestPendingClusterTasksAction.java | 5 +- .../alias/RestIndicesAliasesAction.java | 5 +- .../delete/RestIndexDeleteAliasesAction.java | 5 +- .../alias/get/RestGetAliasesAction.java | 8 +- .../get/RestGetIndicesAliasesAction.java | 9 +- .../alias/head/RestAliasesExistAction.java | 6 +- .../clear/RestClearIndicesCacheAction.java | 7 +- .../indices/close/RestCloseIndexAction.java | 5 +- .../indices/create/RestCreateIndexAction.java | 5 +- .../indices/delete/RestDeleteIndexAction.java | 5 +- .../indices/RestIndicesExistsAction.java | 7 +- .../exists/types/RestTypesExistsAction.java | 7 +- .../admin/indices/flush/RestFlushAction.java | 7 +- .../indices/flush/RestSyncedFlushAction.java | 7 +- .../forcemerge/RestForceMergeAction.java | 8 +- .../indices/get/RestGetIndicesAction.java | 2 - .../mapping/get/RestGetMappingAction.java | 7 +- .../mapping/put/RestPutMappingAction.java | 7 +- .../indices/open/RestOpenIndexAction.java | 5 +- .../indices/recovery/RestRecoveryAction.java | 7 +- .../indices/refresh/RestRefreshAction.java | 7 +- .../segments/RestIndicesSegmentsAction.java | 7 +- .../settings/RestGetSettingsAction.java | 7 +- .../settings/RestUpdateSettingsAction.java | 1 - .../shards/RestIndicesShardStoresAction.java | 9 +- .../indices/stats/RestIndicesStatsAction.java | 7 +- .../delete/RestDeleteIndexTemplateAction.java | 5 +- .../head/RestHeadIndexTemplateAction.java | 7 +- .../put/RestPutIndexTemplateAction.java | 5 +- .../query/RestValidateQueryAction.java | 7 +- .../warmer/delete/RestDeleteWarmerAction.java | 5 +- .../warmer/get/RestGetWarmerAction.java | 7 +- .../rest/action/bulk/RestBulkAction.java | 7 +- .../rest/action/cat/AbstractCatAction.java | 7 +- .../rest/action/cat/RestAliasAction.java | 5 +- 
.../rest/action/cat/RestAllocationAction.java | 6 +- .../rest/action/cat/RestCatAction.java | 7 +- .../rest/action/cat/RestFielddataAction.java | 7 +- .../rest/action/cat/RestIndicesAction.java | 7 +- .../rest/action/cat/RestMasterAction.java | 5 +- .../rest/action/cat/RestNodeAttrsAction.java | 2 +- .../rest/action/cat/RestNodesAction.java | 5 +- .../cat/RestPendingClusterTasksAction.java | 5 +- .../rest/action/cat/RestPluginsAction.java | 7 +- .../rest/action/cat/RestShardsAction.java | 5 +- .../rest/action/count/RestCountAction.java | 7 +- .../rest/action/delete/RestDeleteAction.java | 8 +- .../action/explain/RestExplainAction.java | 7 +- .../fieldstats/RestFieldStatsAction.java | 10 +- .../rest/action/get/RestGetAction.java | 7 +- .../rest/action/get/RestGetSourceAction.java | 7 +- .../rest/action/get/RestHeadAction.java | 7 +- .../rest/action/get/RestMultiGetAction.java | 5 +- .../rest/action/index/RestIndexAction.java | 10 +- .../rest/action/main/RestMainAction.java | 7 +- .../percolate/RestMultiPercolateAction.java | 5 +- .../action/percolate/RestPercolateAction.java | 5 +- .../script/RestDeleteIndexedScriptAction.java | 10 +- .../script/RestGetIndexedScriptAction.java | 8 +- .../script/RestPutIndexedScriptAction.java | 12 +- .../action/search/RestClearScrollAction.java | 5 +- .../action/search/RestSearchScrollAction.java | 5 +- .../action/suggest/RestSuggestAction.java | 7 +- .../support/AcknowledgedRestListener.java | 3 - .../action/support/RestActionListener.java | 2 - .../rest/action/support/RestActions.java | 13 +- .../action/support/RestResponseListener.java | 6 - .../rest/action/support/RestTable.java | 11 +- .../template/RestPutSearchTemplateAction.java | 5 +- .../RestMultiTermVectorsAction.java | 5 +- .../rest/action/update/RestUpdateAction.java | 6 - .../script/AbstractSearchScript.java | 8 +- .../script/ScriptParameterParser.java | 13 +- .../elasticsearch/search/MultiValueMode.java | 7 +- .../elasticsearch/search/SearchModule.java | 11 +- .../search/SearchParseException.java | 5 +- .../org/elasticsearch/search/SearchPhase.java | 1 - .../elasticsearch/search/SearchService.java | 20 +- .../search/SearchShardTarget.java | 1 - .../action/SearchServiceTransportAction.java | 12 +- .../bucket/DeferringBucketCollector.java | 3 +- .../children/ParentToChildrenAggregator.java | 8 +- .../bucket/filter/FilterParser.java | 1 - .../bucket/filters/InternalFilters.java | 1 - .../bucket/geogrid/GeoHashGridAggregator.java | 5 +- .../bucket/geogrid/GeoHashGridParser.java | 5 +- .../bucket/geogrid/InternalGeoHashGrid.java | 3 +- .../nested/ReverseNestedAggregator.java | 5 +- .../bucket/range/ipv4/InternalIPv4Range.java | 2 - .../bucket/range/ipv4/IpRangeParser.java | 1 - .../bucket/sampler/SamplerAggregator.java | 18 +- .../SignificantTermsAggregatorFactory.java | 14 +- .../heuristics/ScriptHeuristic.java | 6 +- .../SignificanceHeuristicParser.java | 2 +- .../SignificanceHeuristicStreams.java | 2 +- .../bucket/terms/StringTermsAggregator.java | 6 +- .../bucket/terms/TermsAggregatorFactory.java | 6 +- .../bucket/terms/support/IncludeExclude.java | 9 +- .../geocentroid/GeoCentroidAggregator.java | 1 - .../AbstractPercentilesParser.java | 1 - .../scripted/ScriptedMetricBuilder.java | 1 - .../pipeline/PipelineAggregatorBuilders.java | 4 +- .../bucketmetrics/BucketMetricsBuilder.java | 4 +- .../bucketmetrics/BucketMetricsParser.java | 1 - .../bucketmetrics/max/MaxBucketParser.java | 2 +- .../PercentilesBucketPipelineAggregator.java | 5 +- .../BucketScriptPipelineAggregator.java | 6 +- 
.../derivative/DerivativeBuilder.java | 2 +- .../movavg/MovAvgPipelineAggregator.java | 1 - .../movavg/models/HoltLinearModel.java | 3 +- .../movavg/models/HoltWintersModel.java | 6 +- .../pipeline/movavg/models/MovAvgModel.java | 1 - .../aggregations/support/FieldContext.java | 1 - .../aggregations/support/ValuesSource.java | 8 +- .../search/builder/SearchSourceBuilder.java | 1 - .../controller/SearchPhaseController.java | 2 +- .../search/dfs/AggregatedDfs.java | 7 +- .../elasticsearch/search/dfs/DfsPhase.java | 1 - .../search/dfs/DfsSearchResult.java | 21 +- .../search/fetch/FetchPhase.java | 9 +- .../search/fetch/FieldsParseElement.java | 2 - .../search/fetch/QueryFetchSearchResult.java | 1 - .../search/fetch/ShardFetchRequest.java | 2 - .../FieldDataFieldsParseElement.java | 2 - .../fetch/innerhits/InnerHitsContext.java | 14 +- .../search/highlight/Highlighters.java | 6 +- .../FragmentBuilderHelper.java | 21 +- .../SourceScoreOrderFragmentsBuilder.java | 5 +- .../SourceSimpleFragmentsBuilder.java | 5 +- .../search/internal/ContextIndexSearcher.java | 9 +- .../search/internal/DefaultSearchContext.java | 17 +- .../search/internal/InternalSearchHit.java | 3 +- .../search/internal/SearchContext.java | 5 +- .../search/lookup/FieldLookup.java | 1 - .../search/lookup/IndexField.java | 3 +- .../profile/InternalProfileShardResults.java | 6 +- .../search/profile/InternalProfileTree.java | 6 +- .../search/profile/ProfileShardResult.java | 4 +- .../search/profile/Profiler.java | 4 +- .../search/query/QueryPhase.java | 5 +- .../search/sort/GeoDistanceSortParser.java | 1 - .../elasticsearch/search/suggest/Suggest.java | 42 +- .../search/suggest/SuggestUtils.java | 57 +- .../search/suggest/Suggesters.java | 6 +- .../completion/CompletionFieldStats.java | 1 - .../completion/CompletionSuggestParser.java | 5 +- .../completion/CompletionSuggester.java | 10 +- .../completion/CompletionSuggestion.java | 11 +- .../CompletionSuggestionBuilder.java | 14 +- .../CompletionSuggestionContext.java | 6 +- .../context/CategoryContextMapping.java | 8 +- .../context/CategoryQueryContext.java | 5 +- .../completion/context/ContextMapping.java | 4 +- .../completion/context/ContextMappings.java | 14 +- .../completion/context/GeoContextMapping.java | 22 +- .../completion/context/GeoQueryContext.java | 5 +- .../suggest/phrase/CandidateGenerator.java | 8 +- .../suggest/phrase/CandidateScorer.java | 10 +- .../phrase/DirectCandidateGenerator.java | 43 +- .../MultiCandidateGeneratorWrapper.java | 16 +- .../phrase/PhraseSuggestionContext.java | 34 +- .../snapshots/RestoreService.java | 49 +- .../org/elasticsearch/snapshots/Snapshot.java | 6 +- .../snapshots/SnapshotCreationException.java | 1 - .../elasticsearch/snapshots/SnapshotInfo.java | 12 +- .../snapshots/SnapshotShardsService.java | 1 - .../snapshots/SnapshotsService.java | 1 - .../elasticsearch/threadpool/ThreadPool.java | 19 +- .../transport/RequestHandlerRegistry.java | 2 - .../transport/TransportFuture.java | 2 - .../transport/TransportService.java | 6 +- .../local/LocalTransportChannel.java | 6 +- .../netty/MessageChannelHandler.java | 18 +- .../netty/NettyTransportChannel.java | 7 +- .../lucene/queries/BlendedTermQueryTests.java | 21 +- .../org/elasticsearch/ESExceptionTests.java | 1 - .../ExceptionSerializationTests.java | 8 +- .../elasticsearch/NamingConventionTests.java | 1 - .../health/ClusterHealthResponsesTests.java | 4 +- .../admin/cluster/stats/ClusterStatsIT.java | 2 +- .../indices/flush/SyncedFlushUnitTests.java | 1 - 
.../shards/IndicesShardStoreRequestIT.java | 1 - .../IndicesShardStoreResponseTests.java | 12 +- .../indices/stats/IndicesStatsTests.java | 8 +- .../MetaDataIndexTemplateServiceTests.java | 7 +- .../admin/indices/upgrade/UpgradeIT.java | 11 +- .../action/bulk/BulkProcessorIT.java | 1 - .../action/bulk/BulkRequestTests.java | 7 +- .../elasticsearch/action/bulk/RetryTests.java | 4 +- .../fieldstats/FieldStatsRequestTests.java | 5 +- .../action/index/IndexRequestTests.java | 6 +- .../master/IndexingMasterFailoverIT.java | 1 - .../TransportMasterNodeActionTests.java | 4 +- .../BroadcastReplicationTests.java | 2 +- .../ClusterStateCreationUtils.java | 12 +- .../TransportReplicationActionTests.java | 5 +- .../AbstractTermVectorsTestCase.java | 22 +- .../GetTermVectorsCheckDocFreqIT.java | 1 - .../action/termvectors/GetTermVectorsIT.java | 1 - .../action/update/UpdateRequestTests.java | 6 +- .../BasicAnalysisBackwardCompatibilityIT.java | 1 - .../BasicBackwardsCompatibilityIT.java | 5 +- .../OldIndexBackwardsCompatibilityIT.java | 14 +- .../transport/FailAndRetryMockTransport.java | 10 +- .../cluster/ClusterInfoServiceIT.java | 1 - .../cluster/ClusterModuleTests.java | 6 +- .../cluster/ClusterStateDiffIT.java | 15 +- .../cluster/MinimumMasterNodesIT.java | 16 +- .../org/elasticsearch/cluster/ack/AckIT.java | 1 - .../allocation/AwarenessAllocationIT.java | 1 - .../cluster/allocation/ClusterRerouteIT.java | 4 +- .../allocation/ShardsAllocatorModuleIT.java | 2 +- .../health/ClusterStateHealthTests.java | 7 +- .../cluster/health/RoutingTableGenerator.java | 6 +- .../node/DiscoveryNodeFiltersTests.java | 6 +- .../routing/RandomShardRoutingMutator.java | 4 +- .../cluster/routing/UnassignedInfoTests.java | 9 +- .../allocation/BalanceConfigurationTests.java | 3 +- .../ClusterRebalanceRoutingTests.java | 7 +- .../NodeVersionAllocationDeciderTests.java | 9 +- .../decider/DiskThresholdDeciderTests.java | 18 +- .../DiskThresholdDeciderUnitTests.java | 14 +- .../decider/EnableAllocationTests.java | 3 +- .../allocation/decider/MockDiskUsagesIT.java | 2 - .../cluster/settings/ClusterSettingsIT.java | 2 - .../bytes/ByteBufferBytesReference.java | 2 +- .../common/cache/CacheTests.java | 14 +- .../common/cli/TerminalTests.java | 1 - .../common/collect/IteratorsTests.java | 7 +- .../common/geo/GeoJSONShapeParserTests.java | 15 +- .../common/geo/ShapeBuilderTests.java | 1 - .../geo/builders/CircleBuilderTests.java | 1 - .../geo/builders/EnvelopeBuilderTests.java | 1 - .../geo/builders/LineStringBuilderTests.java | 1 - .../builders/MultiLineStringBuilderTests.java | 1 - .../geo/builders/MultiPointBuilderTests.java | 1 - .../geo/builders/PointBuilderTests.java | 1 - .../geo/builders/PolygonBuilderTests.java | 1 - .../common/hppc/HppcMapsTests.java | 1 - .../common/inject/ModuleTestCase.java | 8 +- .../common/io/FileSystemUtilsTests.java | 3 +- .../common/io/stream/StreamTests.java | 7 +- .../lucene/IndexCacheableQueryTests.java | 1 - .../common/lucene/LuceneTests.java | 15 +- .../lucene/index/FreqTermsEnumTests.java | 17 +- .../common/network/CidrsTests.java | 8 +- .../common/settings/SettingTests.java | 1 - .../transport/BoundTransportAddressTests.java | 4 +- .../common/util/BigArraysTests.java | 2 +- .../common/util/BytesRefHashTests.java | 1 - .../common/util/CollectionUtilsTests.java | 9 +- .../common/util/ExtensionPointTests.java | 6 +- .../common/util/LongHashTests.java | 1 - .../common/util/LongObjectHashMapTests.java | 1 - .../util/MultiDataPathUpgraderTests.java | 2 +- 
.../concurrent/PrioritizedExecutorsTests.java | 8 +- .../common/xcontent/XContentFactoryTests.java | 9 +- .../support/filtering/FilterPathTests.java | 9 +- .../deps/jackson/JacksonLocationTests.java | 1 - ...usterStatePublishResponseHandlerTests.java | 5 +- .../DiscoveryWithServiceDisruptionsIT.java | 39 +- .../zen/ElectMasterServiceTests.java | 6 +- .../zen/NodeJoinControllerTests.java | 13 +- .../discovery/zen/ZenDiscoveryIT.java | 10 +- .../PendingClusterStatesQueueTests.java | 14 +- .../PublishClusterStateActionTests.java | 29 +- .../document/DocumentActionsIT.java | 2 +- .../fieldstats/FieldStatsIntegrationIT.java | 8 +- .../fieldstats/FieldStatsTests.java | 11 +- .../gateway/MetaDataStateFormatTests.java | 27 +- .../gateway/PrimaryShardAllocatorTests.java | 9 +- .../gateway/PriorityComparatorTests.java | 13 +- .../gateway/RecoveryFromGatewayIT.java | 1 - .../gateway/ReplicaShardAllocatorTests.java | 10 +- .../gateway/ReusePeerRecoverySharedTest.java | 2 - .../http/netty/NettyHttpClient.java | 18 +- .../elasticsearch/index/IndexModuleTests.java | 6 +- .../index/IndexWithShadowReplicasIT.java | 1 - .../index/TransportIndexFailuresIT.java | 3 +- .../index/analysis/AnalysisModuleTests.java | 5 +- .../index/analysis/AnalysisServiceTests.java | 6 +- .../index/analysis/CompoundAnalysisTests.java | 4 +- .../analysis/NGramTokenizerFactoryTests.java | 7 +- .../index/analysis/PatternAnalyzerTests.java | 28 +- .../ShingleTokenFilterFactoryTests.java | 1 - .../index/analysis/StopTokenFilterTests.java | 1 - .../CommonGramsTokenFilterFactoryTests.java | 2 - .../cache/bitset/BitSetFilterCacheTests.java | 8 +- .../index/engine/InternalEngineTests.java | 42 +- .../index/engine/ShadowEngineTests.java | 15 +- .../fielddata/AbstractFieldDataTestCase.java | 8 +- .../AbstractGeoFieldDataTestCase.java | 11 +- .../AbstractStringFieldDataTestCase.java | 1 - .../fielddata/BinaryDVFieldDataTests.java | 1 - .../index/fielddata/DuelFieldDataTests.java | 1 - .../index/fielddata/FieldDataCacheTests.java | 6 +- .../index/fielddata/GeoFieldDataTests.java | 4 +- .../fielddata/IndexFieldDataServiceTests.java | 16 +- .../NoOrdinalsStringFieldDataTests.java | 1 - .../fieldcomparator/ReplaceMissingTests.java | 38 +- .../index/mapper/DynamicMappingTests.java | 4 - .../index/mapper/MapperServiceTests.java | 10 +- .../mapper/all/SimpleAllMapperTests.java | 13 +- .../mapper/binary/BinaryMappingTests.java | 5 +- .../CompletionFieldMapperTests.java | 17 +- .../mapper/core/BinaryFieldTypeTests.java | 1 - .../TokenCountFieldMapperIntegrationIT.java | 1 - .../core/TokenCountFieldMapperTests.java | 6 +- .../mapper/date/SimpleDateMappingTests.java | 18 +- .../mapper/externalvalues/ExternalMapper.java | 2 - .../ExternalMetadataMapper.java | 5 +- .../mapper/geo/GeoPointFieldMapperTests.java | 3 - .../geo/GeohashMappingGeoPointTests.java | 1 - .../mapper/index/IndexTypeMapperTests.java | 14 +- .../mapper/multifield/MultiFieldTests.java | 20 +- .../mapper/parent/ParentMappingTests.java | 2 - .../routing/RoutingTypeMapperTests.java | 5 +- .../source/DefaultSourceMappingTests.java | 7 +- .../timestamp/TimestampMappingTests.java | 1 - .../index/query/AbstractQueryTestCase.java | 28 +- .../index/query/BoolQueryBuilderTests.java | 17 +- .../index/query/DisMaxQueryBuilderTests.java | 50 +- .../GeoBoundingBoxQueryBuilderTests.java | 28 +- .../query/GeoDistanceQueryBuilderTests.java | 5 +- .../query/GeoDistanceRangeQueryTests.java | 27 +- .../query/GeoPolygonQueryBuilderTests.java | 19 +- .../query/GeohashCellQueryBuilderTests.java | 15 +- 
.../query/HasChildQueryBuilderTests.java | 14 +- .../query/HasParentQueryBuilderTests.java | 32 +- .../index/query/MatchQueryBuilderTests.java | 136 ++-- .../query/MultiMatchQueryBuilderTests.java | 36 +- .../query/QueryStringQueryBuilderTests.java | 52 +- .../index/query/RangeQueryBuilderTests.java | 23 +- .../query/SimpleQueryStringBuilderTests.java | 33 +- .../index/query/TermsQueryBuilderTests.java | 12 +- .../index/query/WrapperQueryBuilderTests.java | 11 +- .../FunctionScoreQueryBuilderTests.java | 36 +- .../functionscore/FunctionScoreTests.java | 3 +- .../query/support/QueryInnerHitsTests.java | 1 - .../index/search/MultiMatchQueryTests.java | 7 +- .../index/search/geo/GeoUtilsTests.java | 1 - .../shard/IndexSearcherWrapperTests.java | 8 +- .../index/shard/IndexShardTests.java | 29 +- .../index/shard/NewPathForShardTests.java | 12 +- .../index/shard/ShardUtilsTests.java | 5 +- .../blobstore/SlicedInputStreamTests.java | 1 - .../index/store/CorruptedFileIT.java | 33 +- .../index/store/CorruptedTranslogIT.java | 1 - .../index/store/ExceptionRetryIT.java | 1 - .../index/store/IndexStoreTests.java | 7 +- .../index/store/LegacyVerificationTests.java | 52 +- .../elasticsearch/index/store/StoreTests.java | 51 +- .../index/translog/BufferedTranslogTests.java | 1 - .../index/translog/TranslogTests.java | 21 +- .../IndexLifecycleActionIT.java | 10 +- ...DateMathIndexExpressionsIntegrationIT.java | 3 +- .../indices/IndicesModuleTests.java | 9 +- .../indices/analysis/DummyAnalyzer.java | 3 - .../cache/query/IndicesQueryCacheTests.java | 4 +- .../elasticsearch/indices/flush/FlushIT.java | 2 +- .../indices/flush/SyncedFlushUtil.java | 5 - .../mapping/UpdateMappingIntegrationIT.java | 15 +- .../memory/IndexingMemoryControllerTests.java | 7 +- .../breaker/CircuitBreakerUnitTests.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 1 - .../recovery/RecoverySourceHandlerTests.java | 2 +- .../indices/recovery/RecoveryStateTests.java | 17 +- .../settings/UpdateNumberOfReplicasIT.java | 2 +- .../indices/state/RareClusterStateIT.java | 11 +- .../indices/state/SimpleIndexStateIT.java | 3 +- .../indices/stats/IndexStatsIT.java | 4 +- .../store/IndicesStoreIntegrationIT.java | 17 +- .../indices/warmer/IndicesWarmerBlocksIT.java | 1 - .../indices/warmer/SimpleIndicesWarmerIT.java | 1 - .../monitor/os/OsProbeTests.java | 10 +- .../InternalSettingsPreparerTests.java | 5 +- .../nodesinfo/SimpleNodesInfoIT.java | 4 +- .../percolator/PercolatorIT.java | 1 - .../percolator/RecoveryPercolatorIT.java | 2 +- .../plugins/PluginInfoTests.java | 3 - .../TestResponseHeaderRestAction.java | 8 +- .../recovery/RecoveryWhileUnderLoadIT.java | 5 +- .../elasticsearch/recovery/RelocationIT.java | 1 - .../recovery/TruncatedRecoveryIT.java | 1 - .../rest/util/RestUtilsTests.java | 4 +- .../routing/SimpleRoutingIT.java | 20 +- .../script/ScriptModesTests.java | 1 - .../script/ScriptServiceTests.java | 2 - .../search/MultiValueModeTests.java | 8 +- .../search/SearchModuleTests.java | 1 - .../search/aggregations/CombiIT.java | 1 - .../search/aggregations/MetaDataIT.java | 1 - .../aggregations/bucket/ChildrenIT.java | 2 - .../aggregations/bucket/DateHistogramIT.java | 2 +- .../aggregations/bucket/GeoHashGridIT.java | 1 - .../bucket/ShardSizeTestCase.java | 3 - .../SignificantTermsSignificanceScoreIT.java | 22 +- .../SignificanceHeuristicTests.java | 13 +- .../aggregations/metrics/GeoBoundsIT.java | 10 +- .../aggregations/metrics/ValueCountIT.java | 13 +- .../cardinality/HyperLogLogPlusPlusTests.java | 1 - 
.../pipeline/moving/avg/MovAvgIT.java | 1 - .../support/MissingValuesTests.java | 1 - .../support/ScriptValuesTests.java | 1 - .../basic/SearchWhileCreatingIndexIT.java | 4 +- .../basic/TransportSearchFailuresIT.java | 2 +- .../search/child/ChildQuerySearchIT.java | 2 +- .../elasticsearch/search/geo/GeoFilterIT.java | 7 +- .../search/geo/GeoShapeQueryTests.java | 1 - .../search/highlight/HighlighterSearchIT.java | 3 +- .../search/innerhits/InnerHitsIT.java | 2 +- .../search/morelikethis/MoreLikeThisIT.java | 2 +- .../search/nested/SimpleNestedIT.java | 2 +- .../search/preference/SearchPreferenceIT.java | 2 +- .../search/profile/QueryProfilerIT.java | 16 +- .../search/profile/RandomQueryGenerator.java | 17 +- .../elasticsearch/search/query/ExistsIT.java | 1 - .../search/query/MultiMatchQueryIT.java | 1 - .../search/scroll/DuelScrollIT.java | 1 - .../suggest/CompletionSuggestSearchIT.java | 23 +- .../ContextCompletionSuggestSearchIT.java | 19 +- .../CategoryContextMappingTests.java | 16 +- .../completion/GeoContextMappingTests.java | 9 +- .../AbstractSnapshotIntegTestCase.java | 7 +- .../DedicatedClusterSnapshotRestoreIT.java | 1 - .../SharedClusterSnapshotRestoreIT.java | 5 +- .../SnapshotBackwardsCompatibilityIT.java | 3 +- .../test/geo/RandomShapeGenerator.java | 1 - .../hamcrest/ElasticsearchGeoAssertions.java | 29 +- .../ThreadPoolSerializationTests.java | 1 - .../UpdateThreadPoolSettingsTests.java | 7 +- .../AbstractSimpleTransportTestCase.java | 2 +- .../NettySizeHeaderFrameDecoderTests.java | 2 +- distribution/build.gradle | 7 +- .../org/elasticsearch/test/rest/RestIT.java | 1 - .../org/elasticsearch/test/rest/RestIT.java | 1 - .../org/elasticsearch/test/rest/RestIT.java | 1 - .../org/elasticsearch/test/rest/RestIT.java | 1 - .../org/elasticsearch/test/rest/RestIT.java | 1 - .../expression/CountMethodValueSource.java | 1 - .../expression/DateMethodFunctionValues.java | 8 +- .../expression/DateMethodValueSource.java | 9 +- .../expression/MoreExpressionTests.java | 5 +- .../groovy/GroovyScriptEngineService.java | 9 +- .../elasticsearch/messy/tests/BulkTests.java | 1 - .../tests/ContextAndHeaderTransportTests.java | 3 - .../messy/tests/EquivalenceTests.java | 3 +- .../messy/tests/HistogramTests.java | 1 - .../messy/tests/MinDocCountTests.java | 3 +- .../messy/tests/RandomScoreFunctionTests.java | 20 +- .../messy/tests/SimpleSortTests.java | 44 +- .../script/groovy/GroovySecurityTests.java | 3 +- .../mustache/MustacheScriptEngineService.java | 1 - .../tests/RenderSearchTemplateTests.java | 2 - .../script/mustache/MustacheTests.java | 1 - .../index/analysis/ICUCollationKeyFilter.java | 11 +- .../IcuFoldingTokenFilterFactory.java | 13 +- .../IcuNormalizerTokenFilterFactory.java | 2 - .../IcuTransformTokenFilterFactory.java | 1 - .../analysis/IndexableBinaryStringTools.java | 14 +- .../analysis/icu/AnalysisICUPlugin.java | 7 +- .../SimpleIcuCollationTokenFilterTests.java | 1 - .../SimpleIcuNormalizerCharFilterTests.java | 1 - .../KuromojiBaseFormFilterFactory.java | 1 - .../KuromojiKatakanaStemmerFactory.java | 1 - .../KuromojiReadingFormFilterFactory.java | 1 - .../kuromoji/AnalysisKuromojiPlugin.java | 2 +- .../index/analysis/KuromojiAnalysisTests.java | 5 +- .../analysis/PhoneticTokenFilterFactory.java | 10 +- .../analysis/phonetic/KoelnerPhonetik.java | 19 +- .../SmartChineseAnalyzerProvider.java | 1 - .../SmartChineseNoOpTokenFilterFactory.java | 2 - ...SmartChineseTokenizerTokenizerFactory.java | 2 - .../pl/PolishStemTokenFilterFactory.java | 2 +- .../TransportDeleteByQueryAction.java 
| 14 +- .../management/AzureComputeServiceImpl.java | 5 +- .../AzureComputeSettingsFilter.java | 5 +- .../azure/AzureUnicastHostsProvider.java | 9 +- .../cloud/aws/AwsEc2ServiceImpl.java | 7 +- .../ec2/AwsEc2UnicastHostsProvider.java | 16 +- .../cloud/aws/AWSSignersTests.java | 1 - .../discovery/ec2/AmazonEC2Mock.java | 586 ++++++++++++++---- .../discovery/ec2/Ec2DiscoveryTests.java | 1 - .../cloud/gce/GceComputeServiceImpl.java | 9 +- .../gce/GceUnicastHostsProvider.java | 1 - .../gce/RetryHttpInitializerWrapper.java | 10 +- .../discovery/gce/GceDiscoveryPlugin.java | 1 - .../gce/RetryHttpInitializerWrapperTests.java | 1 - .../multicast/MulticastDiscoveryPlugin.java | 6 +- .../discovery/multicast/MulticastZenPing.java | 5 +- .../plugin/example/ExampleCatAction.java | 5 +- .../plugin/example/JvmExamplePlugin.java | 1 - .../JavaScriptScriptEngineService.java | 17 +- .../support/ScriptValueConverter.java | 14 +- .../org/elasticsearch/plan/a/Adapter.java | 14 +- .../org/elasticsearch/plan/a/Analyzer.java | 82 ++- .../org/elasticsearch/plan/a/Compiler.java | 10 +- .../java/org/elasticsearch/plan/a/Def.java | 11 +- .../plan/a/ErrorHandlingLexer.java | 4 +- .../plan/a/ParserErrorStrategy.java | 4 +- .../org/elasticsearch/plan/a/PlanALexer.java | 83 +-- .../org/elasticsearch/plan/a/PlanAParser.java | 108 ++-- .../plan/a/PlanAScriptEngineService.java | 1 - .../java/org/elasticsearch/plan/a/Writer.java | 76 ++- .../elasticsearch/plan/a/AdditionTests.java | 25 +- .../python/PythonScriptEngineService.java | 20 +- .../mapper/attachments/AttachmentMapper.java | 16 +- .../mapper/attachments/TikaImpl.java | 26 +- .../attachments/EncryptedDocMapperTests.java | 6 +- .../attachments/MapperAttachmentsRestIT.java | 1 - .../attachments/MetadataMapperTests.java | 7 +- .../MultifieldAttachmentMapperTests.java | 5 +- .../SimpleAttachmentMapperTests.java | 5 +- .../mapper/attachments/TikaDocTests.java | 19 +- .../mapper/attachments/VariousDocTests.java | 9 +- .../mapper/murmur3/MapperMurmur3RestIT.java | 1 - .../index/mapper/size/MapperSizeRestIT.java | 1 - .../index/mapper/size/SizeMappingIT.java | 3 - .../index/mapper/size/SizeMappingTests.java | 26 +- .../azure/storage/AzureStorageService.java | 2 +- .../storage/AzureStorageServiceImpl.java | 16 +- .../repositories/azure/AzureRepository.java | 2 +- ...bstractAzureRepositoryServiceTestCase.java | 2 +- .../storage/AzureStorageServiceMock.java | 2 +- .../azure/AzureSettingsParserTest.java | 5 +- .../azure/AzureSnapshotRestoreTests.java | 3 +- .../plugin/hadoop/hdfs/HdfsPlugin.java | 28 +- .../plugin/hadoop/hdfs/Utils.java | 4 +- .../repositories/hdfs/FileSystemFactory.java | 4 +- .../repositories/hdfs/FsCallback.java | 4 +- .../repositories/hdfs/HdfsRepository.java | 26 +- .../repositories/hdfs/SecurityUtils.java | 10 +- .../hadoop/hdfs/HdfsRepositoryRestIT.java | 10 +- .../plugin/hadoop/hdfs/HdfsTestPlugin.java | 2 - .../plugin/hadoop/hdfs/HdfsTests.java | 12 +- .../cloud/aws/InternalAwsS3Service.java | 7 +- .../aws/blobstore/DefaultS3OutputStream.java | 11 +- .../cloud/aws/blobstore/S3BlobContainer.java | 7 +- .../cloud/aws/blobstore/S3BlobStore.java | 8 +- .../repository/s3/S3RepositoryPlugin.java | 1 - .../cloud/aws/AWSSignersTests.java | 1 - .../cloud/aws/AmazonS3Wrapper.java | 81 ++- .../elasticsearch/cloud/aws/TestAmazonS3.java | 7 +- .../s3/AbstractS3SnapshotRestoreTest.java | 1 - .../smbmmapfs/SmbMmapFsDirectoryService.java | 1 - .../lucene/store/ESBaseDirectoryTestCase.java | 5 +- .../bootstrap/BootstrapCliParserTests.java | 9 +- 
.../common/cli/CheckFileCommandTests.java | 1 - .../EvilInternalSettingsPreparerTests.java | 4 +- .../plugins/PluginManagerPermissionTests.java | 20 +- .../plugins/PluginManagerTests.java | 24 +- .../smoketest/ESSmokeClientTestCase.java | 1 - qa/vagrant/build.gradle | 6 +- .../bootstrap/BootstrapForTesting.java | 27 +- .../script/MockScriptEngine.java | 1 - .../test/CompositeTestCluster.java | 7 +- .../elasticsearch/test/CorruptionUtils.java | 6 +- .../test/ESAllocationTestCase.java | 8 +- .../elasticsearch/test/ESIntegTestCase.java | 3 - .../org/elasticsearch/test/ESTestCase.java | 9 +- .../test/ESTokenStreamTestCase.java | 3 +- .../test/InternalTestCluster.java | 25 +- .../org/elasticsearch/test/StreamsUtils.java | 2 +- .../org/elasticsearch/test/TestCluster.java | 2 +- .../elasticsearch/test/TestSearchContext.java | 2 - .../test/cluster/NoopClusterService.java | 11 +- .../test/cluster/TestClusterService.java | 17 +- .../ClusterDiscoveryConfiguration.java | 3 - .../test/engine/AssertingSearcher.java | 2 - .../test/engine/MockEngineFactory.java | 9 - .../test/engine/MockEngineSupport.java | 8 +- .../test/engine/MockShadowEngine.java | 5 - .../engine/ThrowingLeafReaderWrapper.java | 12 +- .../test/hamcrest/ElasticsearchMatchers.java | 1 - .../test/junit/annotations/Network.java | 4 +- .../junit/listeners/ReproduceInfoPrinter.java | 5 +- .../test/rest/ESRestTestCase.java | 1 - .../test/rest/client/RestPath.java | 8 +- .../rest/client/http/HttpRequestBuilder.java | 8 +- .../test/rest/section/IsFalseAssertion.java | 4 +- .../test/rest/section/IsTrueAssertion.java | 5 +- .../test/rest/section/MatchAssertion.java | 4 +- .../test/store/MockFSDirectoryService.java | 16 +- .../test/store/MockFSIndexStore.java | 6 +- .../test/transport/CapturingTransport.java | 16 +- .../rest/test/AbstractParserTestCase.java | 1 - .../test/test/InternalTestClusterTests.java | 8 +- .../test/test/SuiteScopeClusterIT.java | 1 - 1024 files changed, 5955 insertions(+), 2688 deletions(-) diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy index ccb5d5904bf..b28e7210ea4 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy @@ -2,7 +2,6 @@ package com.carrotsearch.gradle.junit4 import com.carrotsearch.ant.tasks.junit4.ListenersList import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener -import com.esotericsoftware.kryo.serializers.FieldSerializer import groovy.xml.NamespaceBuilder import groovy.xml.NamespaceBuilderSupport import org.apache.tools.ant.BuildException @@ -14,7 +13,10 @@ import org.gradle.api.file.FileCollection import org.gradle.api.file.FileTreeElement import org.gradle.api.internal.tasks.options.Option import org.gradle.api.specs.Spec -import org.gradle.api.tasks.* +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.InputDirectory +import org.gradle.api.tasks.Optional +import org.gradle.api.tasks.TaskAction import org.gradle.api.tasks.util.PatternFilterable import org.gradle.api.tasks.util.PatternSet import org.gradle.logging.ProgressLoggerFactory diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy index 5e919ce9ba2..4d69d039a6f 100644 --- 
a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy @@ -27,10 +27,9 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultE import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener import org.gradle.logging.ProgressLogger import org.gradle.logging.ProgressLoggerFactory -import org.junit.runner.Description -import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.* import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.* import static java.lang.Math.max /** diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index 0813713353f..15c24f9c793 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -15,15 +15,10 @@ import org.gradle.api.logging.LogLevel import org.gradle.api.logging.Logger import org.junit.runner.Description +import javax.sound.sampled.* import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger -import javax.sound.sampled.AudioSystem; -import javax.sound.sampled.Clip; -import javax.sound.sampled.Line; -import javax.sound.sampled.LineEvent; -import javax.sound.sampled.LineListener; - import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.* import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index b713c00ed8d..3914442d7da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -19,15 +19,9 @@ package org.elasticsearch.gradle -import org.apache.tools.ant.BuildException -import org.apache.tools.ant.BuildListener -import org.apache.tools.ant.BuildLogger -import org.apache.tools.ant.DefaultLogger -import org.apache.tools.ant.Project +import org.apache.tools.ant.* import org.gradle.api.DefaultTask import org.gradle.api.GradleException -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Optional import org.gradle.api.tasks.TaskAction import java.nio.charset.Charset diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 10f479ee100..c246213fd15 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -18,11 +18,6 @@ */ package org.elasticsearch.gradle -import org.gradle.process.ExecResult - -import java.time.ZonedDateTime -import java.time.ZoneOffset - import nebula.plugin.extraconfigurations.ProvidedBasePlugin import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.* @@ -32,8 +27,12 @@ import org.gradle.api.artifacts.maven.MavenPom import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.JavaCompile import org.gradle.internal.jvm.Jvm +import org.gradle.process.ExecResult import org.gradle.util.GradleVersion +import java.time.ZoneOffset +import java.time.ZonedDateTime + /** * 
Encapsulates build configuration for elasticsearch projects. */ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy index d2059bc4719..12971b7d701 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy @@ -19,9 +19,10 @@ package org.elasticsearch.gradle import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.TaskAction import org.gradle.internal.nativeintegration.filesystem.Chmod -import java.io.File + import javax.inject.Inject /** diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy index 4df6d1b32df..248083af5e0 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy @@ -19,8 +19,9 @@ package org.elasticsearch.gradle import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import java.io.File +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.OutputFile +import org.gradle.api.tasks.TaskAction /** * Creates a file and sets it contents to something. diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 9a000ab3296..db3f3eca50a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -22,7 +22,6 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project -import org.gradle.api.Task import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index ce6b3958eca..dd5bcaedb0b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -20,7 +20,6 @@ package org.elasticsearch.gradle.plugin import org.gradle.api.Project import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Optional /** * A container for plugin properties that will be written to the plugin descriptor, for easy diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index e2f10100269..6fa37be309e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -18,7 +18,9 @@ */ package org.elasticsearch.gradle.precommit -import org.gradle.api.* +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 08976dbdb39..c7c62e0da0a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -27,7 +27,9 @@ import org.gradle.api.* import org.gradle.api.artifacts.Configuration import org.gradle.api.file.FileCollection import org.gradle.api.logging.Logger -import org.gradle.api.tasks.* +import org.gradle.api.tasks.Copy +import org.gradle.api.tasks.Delete +import org.gradle.api.tasks.Exec import java.nio.file.Paths diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 24bd57a3a59..75612bb552e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,8 +20,6 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin -import org.gradle.api.Project -import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.tasks.Input diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy index 842ef8c35cd..bebed415ad8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy @@ -1,7 +1,6 @@ package org.elasticsearch.gradle.test import org.gradle.api.DefaultTask -import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option import org.gradle.util.ConfigureUtil diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy index 3063853c871..af2b20e4abf 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy @@ -27,7 +27,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin -import org.gradle.plugins.ide.eclipse.model.EclipseClasspath /** Configures the build to have a rest integration test. 
*/ public class StandaloneTestBasePlugin implements Plugin<Project> { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy index 6af9edd119c..2f2030f6cd2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy @@ -19,8 +19,7 @@ package org.elasticsearch.gradle.vagrant import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import org.gradle.logging.ProgressLogger +import org.gradle.api.tasks.TaskAction import org.gradle.logging.ProgressLoggerFactory import org.gradle.process.internal.ExecAction import org.gradle.process.internal.ExecActionFactory diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy index 351c34a893b..92b4a575eba 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -19,8 +19,7 @@ package org.elasticsearch.gradle.vagrant import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import org.gradle.logging.ProgressLogger +import org.gradle.api.tasks.TaskAction import org.gradle.logging.ProgressLoggerFactory import org.gradle.process.internal.ExecAction import org.gradle.process.internal.ExecActionFactory diff --git a/core/build.gradle b/core/build.gradle index 61cdd12a194..9bbf8bc1593 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -17,9 +17,9 @@ * under the License. */ + import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin -import org.elasticsearch.gradle.test.RestSpecHack apply plugin: 'elasticsearch.build' apply plugin: 'com.bmuschko.nexus' diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 81f49055223..0d78c952765 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -18,9 +18,19 @@ */ package org.apache.lucene.queries; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexReaderContext; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermContext; +import org.apache.lucene.index.TermState; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.ToStringUtils; diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 9f2b1b66221..744950ed4f6 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++
b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -23,7 +23,14 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.lucene.search.Queries; @@ -35,7 +42,12 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 95657de5158..11b56bdcfe1 100644 --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -22,7 +22,11 @@ package org.apache.lucene.search.vectorhighlight; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; @@ -72,10 +76,10 @@ public class CustomFieldQuery extends FieldQuery { super.flatten(sourceQuery, reader, flatQueries, boost); } } - + private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List<Term[]> terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException { if (currentPos == 0) { - // if we have more than 16 terms + // if we have more than 16 terms int numTerms = 0; for (Term[] currentPosTerm : terms) { numTerms += currentPosTerm.length; @@ -83,7 +87,7 @@ if (numTerms > 16) { for (Term[] currentPosTerm : terms) { for (Term term : currentPosTerm) { - super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); + super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); } } return; diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 18376aff88f..9f9dbf18869 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++
b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -30,7 +30,13 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; /** diff --git a/core/src/main/java/org/elasticsearch/action/ActionFuture.java b/core/src/main/java/org/elasticsearch/action/ActionFuture.java index 2d5f6781d71..1bd5d16b03d 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionFuture.java +++ b/core/src/main/java/org/elasticsearch/action/ActionFuture.java @@ -19,8 +19,6 @@ package org.elasticsearch.action; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import java.util.concurrent.Future; diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index adcb873e838..8f0148d50ae 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -58,6 +58,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; +import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction; +import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; @@ -79,7 +81,9 @@ import org.elasticsearch.action.admin.indices.exists.indices.TransportIndicesExi import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; +import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.TransportForceMergeAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; @@ -107,8 +111,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; -import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import 
org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; @@ -123,8 +125,6 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction; -import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction; import org.elasticsearch.action.admin.indices.warmer.delete.TransportDeleteWarmerAction; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction; diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 009d3fc47a9..c0389c6c194 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java b/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java index a3e7474ea6b..3f8ce1441c5 100644 --- a/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java +++ b/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java @@ -19,7 +19,6 @@ package org.elasticsearch.action; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index 2c66a5ef762..425d599ba8f 100644 --- a/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -23,7 +23,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; /** * A generic proxy that will execute the given action against a specific node. 
diff --git a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java index ff31bb715db..35340213145 100644 --- a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java +++ b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -52,4 +51,4 @@ public class UnavailableShardsException extends ElasticsearchException { public RestStatus status() { return RestStatus.SERVICE_UNAVAILABLE; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index 27add930ea8..ccae17b1eeb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.cluster.health; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; +import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index f1cc59ba760..9830305662e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -23,7 +23,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java index 069e4197e8a..60001f50b62 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java @@ -19,9 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index 329be6c7614..d262888e964 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.node.info; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 5da4f8897cd..65913bc4b28 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -23,7 +23,9 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; public final class TransportLivenessAction implements TransportRequestHandler<LivenessRequest> { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index 1b4b7b06790..a4cf2b1de2a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.node.stats; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; @@ -340,4 +339,4 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { return builder; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java index 152f8230658..451ef21f632 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java @@ -24,11 +24,12 @@ import
org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.repositories.VerificationFailure; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentHelper; import java.io.IOException; -import java.util.Arrays; /** * Unregister repository response diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 28f9cb1db90..fbb6a8d18e8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.reroute; -import org.elasticsearch.Version; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 99815b77ff7..75f94921e61 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -41,8 +41,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import static org.elasticsearch.cluster.ClusterState.builder; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java index ac01a40748b..a4321e56e43 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Map; /** */ diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java index 722eab9d709..2a3f8a0889a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 70f49750f19..b5bb259409a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index a3708f62bfd..72b686ccfde 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.state; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java index 78c217d3621..3a6168315e0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java @@ -19,11 +19,11 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -107,4 +107,4 @@ public class ClusterStatsNodeResponse extends BaseNodeResponse { ss.writeTo(out); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index d8f2a5bbd20..5604616ed39 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.cursors.ObjectIntCursor; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.plugins.PluginInfo; import 
org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; @@ -38,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.plugins.PluginInfo; import java.io.IOException; import java.net.InetAddress; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index 5deb1fd7378..4eac6650a5e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -168,4 +168,4 @@ public class ClusterStatsResponse extends BaseNodesResponse { diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index a6af7050fd7..323a37dfb60 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -20,13 +20,17 @@ package org.elasticsearch.action.get; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.RealtimeRequest; +import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -37,7 +41,11 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.search.fetch.source.FetchSourceContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest, RealtimeRequest { diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index c4756550430..14534b413da 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java
@@ -19,7 +19,6 @@ package org.elasticsearch.action.get; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 501c003c249..6a7e6cafef8 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -22,7 +22,11 @@ package org.elasticsearch.action.index; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.DocumentRequest; +import org.elasticsearch.action.RoutingMissingException; +import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -36,7 +40,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperParsingException; diff --git a/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java b/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java index d826de0bb53..04470850030 100644 --- a/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java +++ b/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java @@ -30,7 +30,6 @@ import org.elasticsearch.script.ScriptService; import java.io.IOException; import java.util.Iterator; -import java.util.Map; /** * The response of a get script action. 
diff --git a/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java b/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java index 76b2dcc0522..82d0f9b7bb0 100644 --- a/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java +++ b/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.indexedscripts.put; -import java.nio.charset.StandardCharsets; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -40,6 +39,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.script.ScriptService; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java index 677f2aa88e6..13c2526a7a3 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java @@ -35,7 +35,11 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.highlight.HighlightField; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * Encapsulates the response of a percolator request. diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index 79f51db59f7..b080039ed0d 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java index 743b4dfe27b..bf7b9e5307b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java @@ -22,7 +22,12 @@ package org.elasticsearch.action.percolate; import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.UnavailableShardsException; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetItemResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import 
org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.get.TransportMultiGetAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; @@ -42,7 +47,11 @@ import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java index 40e8b0730ff..ebf3144a540 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java @@ -28,7 +28,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; /** * diff --git a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 79d0de4884e..0139186562c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -25,15 +25,12 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 3177c676dc3..c106cd1d4e5 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -20,7 +20,10 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.type.*; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryThenFetchAction; +import org.elasticsearch.action.search.type.TransportSearchQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterService; @@ -36,7 +39,8 @@ import org.elasticsearch.transport.TransportService; import 
java.util.Map; import java.util.Set; -import static org.elasticsearch.action.search.SearchType.*; +import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; +import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH; /** * diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 2f4ce5dcc4a..445dec114ee 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -31,7 +31,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import static org.elasticsearch.action.search.type.ParsedScrollId.*; +import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_AND_FETCH_TYPE; +import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_THEN_FETCH_TYPE; import static org.elasticsearch.action.search.type.TransportSearchHelper.parseScrollId; /** diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java index 20bb205fef5..faaf1215542 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java index c23e5b70c15..1d8589e7a96 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index 8f2df714319..8dd9c139c33 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ReduceSearchPhaseException; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index 31cd3986d2f..12c5f733643 100644 
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.ActionListener; diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java index d278a992e93..07ddff3348c 100644 --- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -19,7 +19,11 @@ package org.elasticsearch.action.support; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java index 68f560a23cd..bd070bdc887 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java @@ -25,10 +25,8 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; -import java.util.concurrent.TimeUnit; /** * diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 00f04e5c0ff..3159c3ab2b6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -36,7 +36,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 50b71aecbd2..511b379ce8a 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -33,7 +33,13 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.NodeShouldNotConnectException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d17cc02c5b0..34f7422196e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -40,7 +40,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; @@ -59,7 +61,17 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.ReceiveTimeoutTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportChannelResponseHandler; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Collections; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java index a40e293f64e..13266b9151d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.support.single.instance; import org.elasticsearch.action.Action; 
import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 5f4f9420295..50cdd0ebeb2 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support.single.instance; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.UnavailableShardsException; @@ -42,10 +41,14 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; /** diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 6b419abece1..c14878a3fca 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -39,7 +39,11 @@ import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; import java.util.function.Supplier; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java index d4451157c4b..9cc328c2be7 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.Action; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; /** diff --git 
a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java index 6d880a7b176..a8812fa8d1a 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java @@ -20,17 +20,26 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.RealtimeRequest; +import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; public class MultiTermVectorsRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java index 7be1061d2f6..0b4152fed5c 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java @@ -26,7 +26,10 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostAttribute; -import org.apache.lucene.util.*; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -171,7 +174,7 @@ public final class TermVectorsFields extends Fields { public Terms terms(String field) throws IOException { // first, find where in the termVectors bytes the actual term vector for // this field is stored - final int keySlot = fieldMap.indexOf(field); + final int keySlot = fieldMap.indexOf(field); if (keySlot < 0) { return null; // we don't have it. 
} diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java index a33e8e2cd42..e6904ee5ede 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java @@ -18,7 +18,11 @@ */ package org.elasticsearch.action.termvectors; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -204,21 +208,21 @@ public class TermVectorsFilter { BytesRef termBytesRef = termsEnum.term(); boolean foundTerm = topLevelTermsEnum.seekExact(termBytesRef); assert foundTerm : "Term: " + termBytesRef.utf8ToString() + " not found!"; - + Term term = new Term(fieldName, termBytesRef); - + // remove noise words int freq = getTermFreq(termsEnum, docsEnum); if (isNoise(term.bytes().utf8ToString(), freq)) { continue; } - + // now call on docFreq long docFreq = getTermStatistics(topLevelTermsEnum, term).docFreq(); if (!isAccepted(docFreq)) { continue; } - + // filter based on score float score = computeScore(docFreq, freq, numDocs); queue.addOrUpdate(new ScoreTerm(term.field(), term.bytes().utf8ToString(), score)); diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java index 9bf9957e1e3..c3a474cd21e 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.termvectors; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java index 89a8ff088f6..6b5e497b8e5 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java @@ -18,7 +18,11 @@ */ package org.elasticsearch.action.termvectors; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.TermStatistics; @@ -288,4 +292,4 @@ final class TermVectorsWriter { // further... 
output.writeVLong(Math.max(0, value + 1)); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index bff22bc19f5..573f3d5be3e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -22,7 +22,6 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.NativeLong; import com.sun.jna.Structure; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -53,21 +52,21 @@ final class JNACLibrary { static native int mlockall(int flags); static native int geteuid(); - + /** corresponds to struct rlimit */ public static final class Rlimit extends Structure implements Structure.ByReference { public NativeLong rlim_cur = new NativeLong(0); public NativeLong rlim_max = new NativeLong(0); - + @Override protected List getFieldOrder() { return Arrays.asList(new String[] { "rlim_cur", "rlim_max" }); } } - + static native int getrlimit(int resource, Rlimit rlimit); static native int setrlimit(int resource, Rlimit rlimit); - + static native String strerror(int errno); private JNACLibrary() { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index 26e485802f4..fbd6857d365 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -19,9 +19,12 @@ package org.elasticsearch.bootstrap; -import com.sun.jna.*; +import com.sun.jna.IntegerType; +import com.sun.jna.Native; +import com.sun.jna.NativeLong; +import com.sun.jna.Pointer; +import com.sun.jna.Structure; import com.sun.jna.win32.StdCallLibrary; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 5356d33bb8e..78dbc00ae30 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -21,7 +21,6 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.Pointer; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -56,18 +55,18 @@ class JNANatives { boolean rlimitSuccess = false; long softLimit = 0; long hardLimit = 0; - + try { int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT); if (result == 0) { LOCAL_MLOCKALL = true; return; } - + errno = Native.getLastError(); errMsg = JNACLibrary.strerror(errno); if (Constants.LINUX || Constants.MAC_OS_X) { - // we only know RLIMIT_MEMLOCK for these two at the moment. + // we only know RLIMIT_MEMLOCK for these two at the moment. 
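// mlockall(2) has already failed at this point: getrlimit(2) below is queried purely
// for diagnostics, so the failure log can report the current soft/hard RLIMIT_MEMLOCK
// values and suggest raising them (e.g. via `ulimit -l`) instead of printing only errno.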
JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit(); if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_MEMLOCK, rlimit) == 0) { rlimitSuccess = true; @@ -103,7 +102,7 @@ class JNANatives { } } } - + static String rlimitToString(long value) { assert Constants.LINUX || Constants.MAC_OS_X; if (value == JNACLibrary.RLIM_INFINITY) { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java index 9a4a26c74e3..00f60a70a20 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java @@ -26,7 +26,6 @@ import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import com.sun.jna.Structure; import com.sun.jna.ptr.PointerByReference; - import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.logging.ESLogger; @@ -43,7 +42,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -/** +/** * Installs a limited form of secure computing mode, * to filters system calls to block process execution. *
<p>
@@ -54,7 +53,7 @@ import java.util.Map; * <p>
* On Linux BPF Filters are installed using either {@code seccomp(2)} (3.17+) or {@code prctl(2)} (3.5+). {@code seccomp(2)} * is preferred, as it allows filters to be applied to any existing threads in the process, and one motivation - * here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method + * here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method * which will at least protect elasticsearch application threads. * <p>
* Linux BPF filters will return {@code EACCES} (Access Denied) for the following system calls: @@ -99,13 +98,13 @@ final class Seccomp { /** Access to non-standard Linux libc methods */ static interface LinuxLibrary extends Library { - /** - * maps to prctl(2) + /** + * maps to prctl(2) */ int prctl(int option, NativeLong arg2, NativeLong arg3, NativeLong arg4, NativeLong arg5); - /** - * used to call seccomp(2), its too new... - * this is the only way, DONT use it on some other architecture unless you know wtf you are doing + /** + * used to call seccomp(2), its too new... + * this is the only way, DONT use it on some other architecture unless you know wtf you are doing */ NativeLong syscall(NativeLong number, Object... args); }; @@ -124,7 +123,7 @@ final class Seccomp { } linux_libc = lib; } - + /** the preferred method is seccomp(2), since we can apply to all threads of the process */ static final int SECCOMP_SET_MODE_FILTER = 1; // since Linux 3.17 static final int SECCOMP_FILTER_FLAG_TSYNC = 1; // since Linux 3.17 @@ -135,7 +134,7 @@ final class Seccomp { static final int PR_GET_SECCOMP = 21; // since Linux 2.6.23 static final int PR_SET_SECCOMP = 22; // since Linux 2.6.23 static final long SECCOMP_MODE_FILTER = 2; // since Linux Linux 3.5 - + /** corresponds to struct sock_filter */ static final class SockFilter { short code; // insn @@ -150,12 +149,12 @@ final class Seccomp { this.k = k; } } - + /** corresponds to struct sock_fprog */ public static final class SockFProg extends Structure implements Structure.ByReference { public short len; // number of filters public Pointer filter; // filters - + public SockFProg(SockFilter filters[]) { len = (short) filters.length; // serialize struct sock_filter * explicitly, its less confusing than the JNA magic we would need @@ -170,13 +169,13 @@ final class Seccomp { } this.filter = filter; } - + @Override protected List getFieldOrder() { return Arrays.asList(new String[] { "len", "filter" }); } } - + // BPF "macros" and constants static final int BPF_LD = 0x00; static final int BPF_W = 0x00; @@ -187,15 +186,15 @@ final class Seccomp { static final int BPF_JGT = 0x20; static final int BPF_RET = 0x06; static final int BPF_K = 0x00; - + static SockFilter BPF_STMT(int code, int k) { return new SockFilter((short) code, (byte) 0, (byte) 0, k); } - + static SockFilter BPF_JUMP(int code, int k, int jt, int jf) { return new SockFilter((short) code, (byte) jt, (byte) jf, k); } - + static final int SECCOMP_RET_ERRNO = 0x00050000; static final int SECCOMP_RET_DATA = 0x0000FFFF; static final int SECCOMP_RET_ALLOW = 0x7FFF0000; @@ -260,13 +259,13 @@ final class Seccomp { /** try to install our BPF filters via seccomp() or prctl() to block execution */ private static int linuxImpl() { // first be defensive: we can give nice errors this way, at the very least. - // also, some of these security features get backported to old versions, checking kernel version here is a big no-no! + // also, some of these security features get backported to old versions, checking kernel version here is a big no-no! final Arch arch = ARCHITECTURES.get(Constants.OS_ARCH); boolean supported = Constants.LINUX && arch != null; if (supported == false) { throw new UnsupportedOperationException("seccomp unavailable: '" + Constants.OS_ARCH + "' architecture unsupported"); } - + // we couldn't link methods, could be some really ancient kernel (e.g. < 2.1.57) or some bug if (linux_libc == null) { throw new UnsupportedOperationException("seccomp unavailable: could not link methods. 
requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in"); @@ -364,12 +363,12 @@ final class Seccomp { if (linux_prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) != 0) { throw new UnsupportedOperationException("prctl(PR_SET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError())); } - + // check it worked if (linux_prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) != 1) { throw new UnsupportedOperationException("seccomp filter did not really succeed: prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError())); } - + // BPF installed to check arch, limit, then syscall. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details. SockFilter insns[] = { /* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), // @@ -399,11 +398,11 @@ final class Seccomp { } if (linux_prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) != 0) { int errno2 = Native.getLastError(); - throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + + throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + ", prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno2)); } } - + // now check that the filter was really installed, we should be in filter mode. if (linux_prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) { throw new UnsupportedOperationException("seccomp filter installation did not really succeed. seccomp(PR_GET_SECCOMP): " + JNACLibrary.strerror(Native.getLastError())); @@ -486,12 +485,12 @@ final class Seccomp { } } } - + // Solaris implementation via priv_set(3C) /** Access to non-standard Solaris libc methods */ static interface SolarisLibrary extends Library { - /** + /** * see priv_set(3C), a convenience method for setppriv(2). */ int priv_set(int op, String which, String... privs); @@ -511,7 +510,7 @@ final class Seccomp { } libc_solaris = lib; } - + // constants for priv_set(2) static final int PRIV_OFF = 1; static final String PRIV_ALLSETS = null; @@ -531,7 +530,7 @@ final class Seccomp { throw new UnsupportedOperationException("priv_set unavailable: could not link methods. requires Solaris 10+"); } - // drop a null-terminated list of privileges + // drop a null-terminated list of privileges if (libc_solaris.priv_set(PRIV_OFF, PRIV_ALLSETS, PRIV_PROC_FORK, PRIV_PROC_EXEC, null) != 0) { throw new UnsupportedOperationException("priv_set unavailable: priv_set(): " + JNACLibrary.strerror(Native.getLastError())); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 2d342eb5743..43ad73b5dea 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -29,7 +29,8 @@ import org.elasticsearch.http.netty.NettyHttpServerTransport; import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.transport.netty.NettyTransport; -import java.io.*; +import java.io.FilePermission; +import java.io.IOException; import java.net.SocketPermission; import java.net.URISyntaxException; import java.net.URL; @@ -49,7 +50,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -/** +/** * Initializes SecurityManager with necessary permissions. *
<p>
* <h1>Initialization</h1>
@@ -105,8 +106,8 @@ import java.util.Map; final class Security { /** no instantiation */ private Security() {} - - /** + + /** * Initializes SecurityManager for the environment * Can only happen once! * @param environment configuration for generating dynamic permissions @@ -266,11 +267,11 @@ final class Security { policy.add(new FilePermission(environment.pidFile().toString(), "delete")); } } - + static void addBindPermissions(Permissions policy, Settings settings) throws IOException { // http is simple - String httpRange = settings.get("http.netty.port", - settings.get("http.port", + String httpRange = settings.get("http.netty.port", + settings.get("http.port", NettyHttpServerTransport.DEFAULT_PORT_RANGE)); // listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted. // see SocketPermission implies() code @@ -287,8 +288,8 @@ final class Security { for (Map.Entry entry : profiles.entrySet()) { Settings profileSettings = entry.getValue(); String name = entry.getKey(); - String transportRange = profileSettings.get("port", - settings.get("transport.tcp.port", + String transportRange = profileSettings.get("port", + settings.get("transport.tcp.port", NettyTransport.DEFAULT_PORT_RANGE)); // a profile is only valid if its the default profile, or if it has an actual name and specifies a port @@ -300,7 +301,7 @@ final class Security { } } } - + /** * Add access to path (and all files underneath it) * @param policy current policy to add permissions to @@ -320,7 +321,7 @@ final class Security { policy.add(new FilePermission(path.toString(), permissions)); policy.add(new FilePermission(path.toString() + path.getFileSystem().getSeparator() + "-", permissions)); } - + /** * Ensures configured directory {@code path} exists. * @throws IOException if {@code path} exists, but is not a directory, not accessible, or broken symbolic link. diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java index cdcff7810df..dc050e0b6c6 100644 --- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java +++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java @@ -32,7 +32,10 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; import java.util.Locale; -import static org.elasticsearch.common.recycler.Recyclers.*; +import static org.elasticsearch.common.recycler.Recyclers.concurrent; +import static org.elasticsearch.common.recycler.Recyclers.concurrentDeque; +import static org.elasticsearch.common.recycler.Recyclers.dequeFactory; +import static org.elasticsearch.common.recycler.Recyclers.none; /** A recycler of fixed-size pages. 
*/ public class PageCacheRecycler extends AbstractComponent { diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index a396e183bb7..e7461dabfe1 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -33,7 +33,12 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; import org.elasticsearch.action.fieldstats.FieldStatsResponse; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetRequestBuilder; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetRequestBuilder; +import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -46,12 +51,32 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptResponse; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; -import org.elasticsearch.action.percolate.*; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.percolate.MultiPercolateRequest; +import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; +import org.elasticsearch.action.percolate.MultiPercolateResponse; +import org.elasticsearch.action.percolate.PercolateRequest; +import org.elasticsearch.action.percolate.PercolateRequestBuilder; +import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollRequestBuilder; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchRequestBuilder; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; @@ -414,7 +439,7 @@ public interface Client extends ElasticsearchClient, Releasable { * 
Performs multiple search requests. */ MultiSearchRequestBuilder prepareMultiSearch(); - + /** * An action that returns the term vectors for a specific document. * diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index 1be22b257e7..947e0f9b526 100644 --- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -19,7 +19,8 @@ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; diff --git a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index 08a95bc71e6..4cf5a5a961d 100644 --- a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -20,7 +20,12 @@ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.threadpool.ThreadPool; public interface ElasticsearchClient { diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java index c0a93f5aa05..06d81f0c9d5 100644 --- a/core/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java @@ -18,7 +18,11 @@ */ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; diff --git a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 73eed43352b..67205fc0a9a 100644 --- a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -53,13 +53,21 @@ import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import 
org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.mapping.get.*; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; @@ -87,9 +95,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java index 063fd10dcfc..2640618f1bc 100644 --- a/core/src/main/java/org/elasticsearch/client/Requests.java +++ b/core/src/main/java/org/elasticsearch/client/Requests.java @@ -43,14 +43,14 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java index 
74938875648..65adfad64dc 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -19,7 +19,12 @@ package org.elasticsearch.client.node; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.client.support.Headers; diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index ea57901f2b3..8c0fe125135 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -19,7 +19,12 @@ package org.elasticsearch.client.support; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; @@ -143,6 +148,10 @@ import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; @@ -151,7 +160,14 @@ import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.mapping.get.*; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; @@ -188,10 +204,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; @@ -244,7 +256,14 @@ import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; import org.elasticsearch.action.fieldstats.FieldStatsResponse; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetRequestBuilder; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetAction; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetRequestBuilder; +import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -261,20 +280,52 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; -import org.elasticsearch.action.percolate.*; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.percolate.MultiPercolateAction; +import org.elasticsearch.action.percolate.MultiPercolateRequest; +import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; +import org.elasticsearch.action.percolate.MultiPercolateResponse; +import org.elasticsearch.action.percolate.PercolateAction; +import org.elasticsearch.action.percolate.PercolateRequest; +import org.elasticsearch.action.percolate.PercolateRequestBuilder; +import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.ClearScrollAction; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollRequestBuilder; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.MultiSearchAction; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchRequestBuilder; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchAction; +import 
org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsAction; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.action.termvectors.TermVectorsAction; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.*; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ClusterAdminClient; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java b/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java index 4e6da2bd569..317faefa368 100644 --- a/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java +++ b/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java @@ -20,9 +20,9 @@ package org.elasticsearch.cluster; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.StreamableReader; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.StreamableReader; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 43c616d799a..facddab3fb9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -56,13 +56,11 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import 
org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; @@ -81,7 +79,9 @@ import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.search.internal.DefaultSearchContext; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; /** * Configures classes and services that affect the entire cluster. diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 34ccfd3b433..dd8c737b6b0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -30,7 +30,12 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.Nullable; @@ -51,7 +56,11 @@ import org.elasticsearch.discovery.local.LocalDiscovery; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import java.io.IOException; -import java.util.*; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.Set; /** * Represents the current state of the cluster. 
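The hunks around here touch the cluster-state diff plumbing (AbstractDiffable, Diffable, DiffableUtils). For orientation only, a minimal sketch of the contract these classes follow; the names are hypothetical and simplified, not the actual org.elasticsearch.cluster interfaces:

// Hypothetical sketch: instead of publishing the full cluster state on every change,
// the master computes a delta against the previous version and each receiving node
// applies that delta to its local copy to reconstruct the new version.
interface StateDiff<T> {
    T apply(T previousPart);           // rebuild the new version from the old one
}

interface StatePart<T> {
    StateDiff<T> diff(T previousPart); // compute the delta since the previous version
}

// Toy part whose whole state is a single version counter.
final class Counter implements StatePart<Counter> {
    final long version;
    Counter(long version) { this.version = version; }

    @Override
    public StateDiff<Counter> diff(Counter previous) {
        final long delta = this.version - previous.version;
        return prev -> new Counter(prev.version + delta); // the delta carries only the change
    }
}

The StreamInput/Writeable import reshuffling in these hunks reflects that such deltas are also serialized over the transport layer during publishing, which is why the diff classes sit next to the stream I/O types.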
diff --git a/core/src/main/java/org/elasticsearch/cluster/Diffable.java b/core/src/main/java/org/elasticsearch/cluster/Diffable.java index 7ce60047a2b..cdad098c38e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/Diffable.java +++ b/core/src/main/java/org/elasticsearch/cluster/Diffable.java @@ -19,8 +19,8 @@ package org.elasticsearch.cluster; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java index 1488f059437..fb9d7159105 100644 --- a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java +++ b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.cursors.IntCursor; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java index f191c202c45..fb5f2334969 100644 --- a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java +++ b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.io.stream.StreamInput; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index dd7eb9f0c6d..a083476ea2f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 6ee8365d378..9a112613b1d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java index 4079f14abc7..d4f453530bc 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java @@ -33,7 +33,12 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.List; @@ -191,4 +196,4 @@ public class NodeIndexDeletedAction extends AbstractComponent { nodeId = in.readString(); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index f8507e5b689..c7dddce36f7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -31,7 +31,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java index 3b12d874ada..d66a2437ef2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java +++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java @@ -32,7 +32,11 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import static org.elasticsearch.cluster.health.ClusterIndexHealth.readClusterIndexHealth; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 93961bf1fbb..af98d9c2fde 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -39,7 +39,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; @@ -58,7 +62,9 @@ import java.util.Set; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; -import static org.elasticsearch.common.settings.Settings.*; +import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 55cb8a5d944..7719ba8f857 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 96d378af042..b2c9e500f66 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -71,7 +70,14 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 71ef9c22c33..32a66bfb764 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -37,7 +37,11 @@ import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesService; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Service responsible for 
submitting add and remove aliases requests diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index b38e99d4493..df26df29800 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -29,19 +29,14 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException; import org.elasticsearch.rest.RestStatus; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 790cb99c64b..da2fc064dc4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -36,7 +36,13 @@ import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; /** * Service responsible for submitting index templates updates diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 00904af8915..7bd83b5ddd3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.routing.UnassignedInfo; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 8093d93ccce..739d831597a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -22,7 +22,11 @@ package org.elasticsearch.cluster.metadata; import 
com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; @@ -42,7 +46,13 @@ import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.percolator.PercolatorService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Service responsible for submitting mapping changes */ diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index eaa1eefd25e..35c9c51143f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -24,7 +24,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; @@ -40,7 +44,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.settings.IndexDynamicSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 780f511d912..9f0a33ec4cf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.node; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 16b7e9e1a13..0bb64220f1f 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.node; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Booleans; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java index f391bf3d667..9cf429383fd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java @@ -21,7 +21,8 @@ package org.elasticsearch.cluster.routing; import org.apache.lucene.util.CollectionUtil; -import java.util.*; +import java.util.Iterator; +import java.util.List; /** * This class implements a compilation of {@link ShardIterator}s. Each {@link ShardIterator} @@ -42,7 +43,7 @@ public class GroupShardsIterator implements Iterable<ShardIterator> { } /** - * Returns the total number of shards within all groups + * Returns the total number of shards within all groups * @return total number of shards */ public int totalSize() { @@ -55,7 +56,7 @@ /** * Returns the total number of shards plus the number of empty groups - * @return number of shards and empty groups + * @return number of shards and empty groups */ public int totalSizeWith1ForEmpty() { int size = 0; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index ca071c811e3..bcf489c6a2c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -32,7 +32,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; import java.util.concurrent.ThreadLocalRandom; /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java index 596bb97887c..ff6c8293420 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java @@ -20,9 +20,12 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Iterators; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; /** * A {@link RoutingNode} represents a cluster node associated with a single {@link DiscoveryNode} including all shards @@ -57,7 +60,7 @@ public class RoutingNode implements Iterable<ShardRouting> { /** * Returns the nodes {@link DiscoveryNode}.
- * + * * @return discoveryNode of this node */ public DiscoveryNode node() { @@ -65,7 +68,7 @@ } /** - * Get the id of this node + * Get the id of this node * @return id of the node */ public String nodeId() { @@ -93,7 +96,7 @@ /** * Determine the number of shards with a specific state * @param states set of states which should be counted - * @return number of shards + * @return number of shards */ public int numberOfShardsWithState(ShardRoutingState... states) { int count = 0; @@ -110,7 +113,7 @@ /** * Determine the shards with a specific state * @param states set of states which should be listed - * @return List of shards + * @return List of shards */ public List<ShardRouting> shardsWithState(ShardRoutingState... states) { List<ShardRouting> shards = new ArrayList<>(); @@ -125,7 +128,7 @@ } /** - * Determine the shards of an index with a specific state + * Determine the shards of an index with a specific state * @param index id of the index * @param states set of states which should be listed * @return a list of shards diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java index 5cd4366bea4..c683f0200dc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java @@ -19,7 +19,11 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Priority; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index fbabacd79fd..0fb7513f73f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.IntSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 2268bf1d995..feafb76a5f2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -39,12 +39,15 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import
org.elasticsearch.common.Randomness; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index e6dc9a65efd..17e9de19601 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.routing.allocation.allocator; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.IntroSorter; import org.elasticsearch.cluster.metadata.MetaData; @@ -34,10 +33,10 @@ import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java index 1e835dc4039..13ba033d0ef 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.command; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Nullable; @@ -44,12 +43,12 @@ public interface AllocationCommand { /** * Reads an {@link AllocationCommand} of type T from a {@link StreamInput} - * @param in {@link StreamInput} to read the {@link AllocationCommand} from + * @param in {@link StreamInput} to read the {@link AllocationCommand} from * @return {@link AllocationCommand} read from the {@link StreamInput} * @throws IOException if something happens during reading */ T readFrom(StreamInput in) throws IOException; - + /** * Writes an {@link AllocationCommand} to a {@link StreamOutput} * @param command {@link AllocationCommand} to write @@ -57,7 +56,7 @@ public interface AllocationCommand { * @throws IOException if something happens during writing the command */ void writeTo(T command, StreamOutput out) throws IOException; - + /** * Reads an {@link AllocationCommand} of type T from a {@link XContentParser} * 
@param parser {@link XContentParser} to use @@ -65,7 +64,7 @@ public interface AllocationCommand { * @throws IOException if something happens during reading */ T fromXContent(XContentParser parser) throws IOException; - + /** * Writes an {@link AllocationCommand} using an {@link XContentBuilder} * @param command {@link AllocationCommand} to write diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index f54fce4ca6e..ed535df2f48 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -21,8 +21,8 @@ package org.elasticsearch.cluster.routing.allocation.command; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index a66c8ddaef7..19047bcb0d1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -24,9 +24,9 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 68fd7f3db94..23624f050a9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -33,10 +33,10 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index 597f0add8da..cf889cde6ad 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -23,8 +23,8 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index b97e6138674..bbd28104baf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; diff --git a/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java b/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java index 1935a3348f2..7c6dec2c1b7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java +++ b/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.settings; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; diff --git a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java b/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java index 38764db9eae..52d5af5e334 100644 --- a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java +++ b/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java @@ -20,7 +20,6 @@ package org.elasticsearch.common; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; - import org.elasticsearch.common.collect.ImmutableOpenMap; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/common/Nullable.java b/core/src/main/java/org/elasticsearch/common/Nullable.java index 4f017e48418..e7397be87df 100644 --- a/core/src/main/java/org/elasticsearch/common/Nullable.java +++ b/core/src/main/java/org/elasticsearch/common/Nullable.java @@ -19,7 +19,11 @@ package org.elasticsearch.common; -import java.lang.annotation.*; +import java.lang.annotation.Documented; +import 
java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * The presence of this annotation on a method parameter indicates that @@ -32,4 +36,4 @@ import java.lang.annotation.*; @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.PARAMETER, ElementType.FIELD, ElementType.METHOD}) public @interface Nullable { -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/common/PidFile.java b/core/src/main/java/org/elasticsearch/common/PidFile.java index 7408e3c9ec8..976a9ac9d90 100644 --- a/core/src/main/java/org/elasticsearch/common/PidFile.java +++ b/core/src/main/java/org/elasticsearch/common/PidFile.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.common; -import java.nio.charset.StandardCharsets; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.monitor.jvm.JvmInfo; import java.io.IOException; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; diff --git a/core/src/main/java/org/elasticsearch/common/SuppressForbidden.java b/core/src/main/java/org/elasticsearch/common/SuppressForbidden.java index 3dffbdc2d1f..d5d56f415d0 100644 --- a/core/src/main/java/org/elasticsearch/common/SuppressForbidden.java +++ b/core/src/main/java/org/elasticsearch/common/SuppressForbidden.java @@ -19,10 +19,10 @@ package org.elasticsearch.common; +import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.lang.annotation.ElementType; /** * Annotation to suppress forbidden-apis errors inside a whole class, a method, or a field. 
*/ diff --git a/core/src/main/java/org/elasticsearch/common/bytes/BytesArray.java b/core/src/main/java/org/elasticsearch/common/bytes/BytesArray.java index 712cf53501d..5cb690b7375 100644 --- a/core/src/main/java/org/elasticsearch/common/bytes/BytesArray.java +++ b/core/src/main/java/org/elasticsearch/common/bytes/BytesArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.bytes; -import java.nio.charset.StandardCharsets; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,6 +28,7 @@ import org.jboss.netty.buffer.ChannelBuffers; import java.io.IOException; import java.io.OutputStream; import java.nio.channels.GatheringByteChannel; +import java.nio.charset.StandardCharsets; import java.util.Arrays; public class BytesArray implements BytesReference { diff --git a/core/src/main/java/org/elasticsearch/common/bytes/ChannelBufferBytesReference.java b/core/src/main/java/org/elasticsearch/common/bytes/ChannelBufferBytesReference.java index dc5fd31415f..4d6c11214bb 100644 --- a/core/src/main/java/org/elasticsearch/common/bytes/ChannelBufferBytesReference.java +++ b/core/src/main/java/org/elasticsearch/common/bytes/ChannelBufferBytesReference.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.bytes; -import java.nio.charset.StandardCharsets; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,6 +27,7 @@ import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; import java.io.OutputStream; import java.nio.channels.GatheringByteChannel; +import java.nio.charset.StandardCharsets; /** */ diff --git a/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java b/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java index 8d4ac9068e7..603087a9213 100644 --- a/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java +++ b/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.bytes; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; diff --git a/core/src/main/java/org/elasticsearch/common/cache/Cache.java b/core/src/main/java/org/elasticsearch/common/cache/Cache.java index a6c3bc81afd..a5b071c848b 100644 --- a/core/src/main/java/org/elasticsearch/common/cache/Cache.java +++ b/core/src/main/java/org/elasticsearch/common/cache/Cache.java @@ -22,7 +22,11 @@ package org.elasticsearch.common.cache; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.util.concurrent.ReleasableLock; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.LongAdder; diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java index 9523115b024..82898b3e457 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java +++ b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java @@ -22,7 +22,11 @@ package org.elasticsearch.common.cli; import 
org.apache.commons.cli.CommandLine; import org.elasticsearch.common.SuppressForbidden; -import java.io.*; +import java.io.BufferedReader; +import java.io.Console; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.PrintWriter; import java.util.Locale; /** diff --git a/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java b/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java index c0114480498..3c0579c87e1 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java @@ -18,10 +18,20 @@ */ package org.elasticsearch.common.collect; + import org.apache.lucene.util.mutable.MutableValueInt; import java.lang.reflect.Array; -import java.util.*; +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; import java.util.stream.Stream; /** diff --git a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java index b807d48a971..0b53614723b 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java @@ -19,7 +19,13 @@ package org.elasticsearch.common.collect; -import com.carrotsearch.hppc.*; +import com.carrotsearch.hppc.IntCollection; +import com.carrotsearch.hppc.IntContainer; +import com.carrotsearch.hppc.IntLookupContainer; +import com.carrotsearch.hppc.IntObjectAssociativeContainer; +import com.carrotsearch.hppc.IntObjectHashMap; +import com.carrotsearch.hppc.IntObjectMap; +import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.IntCursor; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; diff --git a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 47c1bdfb826..c5210b1c2dc 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -19,7 +19,12 @@ package org.elasticsearch.common.collect; -import com.carrotsearch.hppc.*; +import com.carrotsearch.hppc.ObjectCollection; +import com.carrotsearch.hppc.ObjectContainer; +import com.carrotsearch.hppc.ObjectLookupContainer; +import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; +import com.carrotsearch.hppc.ObjectObjectHashMap; +import com.carrotsearch.hppc.ObjectObjectMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.hppc.predicates.ObjectObjectPredicate; @@ -241,8 +246,8 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObjectCursor<KType, VType>> { return new Builder<>(map); } - - + + /** * Puts all the entries in the map to the builder.
*/ diff --git a/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java b/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java index 452f644462f..2933bffa630 100644 --- a/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java +++ b/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java @@ -19,11 +19,8 @@ package org.elasticsearch.common.component; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasable; -import java.io.Closeable; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java index 2a31596eab9..69a7d1f3e57 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; + import java.io.IOException; import java.util.Locale; diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index 83a8adab1e0..4ed4a2860a3 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -23,7 +23,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Numbers; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -107,53 +106,53 @@ public class GeoUtils { } /** - * Calculate the width (in meters) of geohash cells at a specific level - * @param level geohash level must be greater or equal to zero - * @return the width of cells at level in meters + * Calculate the width (in meters) of geohash cells at a specific level + * @param level geohash level must be greater or equal to zero + * @return the width of cells at level in meters */ public static double geoHashCellWidth(int level) { assert level>=0; // Geohash cells are split into 32 cells at each level. the grid - // alternates at each level between a 8x4 and a 4x8 grid + // alternates at each level between a 8x4 and a 4x8 grid return EARTH_EQUATOR / (1L<<((((level+1)/2)*3) + ((level/2)*2))); } /** - * Calculate the width (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the width of cells at level in meters + * Calculate the width (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the width of cells at level in meters */ public static double quadTreeCellWidth(int level) { assert level >=0; return EARTH_EQUATOR / (1L<<level); } /** - * Calculate the height (in meters) of geohash cells at a specific level - * @param level geohash level must be greater or equal to zero - * @return the height of cells at level in meters + * Calculate the height (in meters) of geohash cells at a specific level + * @param level geohash level must be greater or equal to zero + * @return the height of cells at level in meters */ public static double geoHashCellHeight(int level) { assert level>=0; // Geohash cells are split into 32 cells at each level.
the grid - // alternates at each level between a 8x4 and a 4x8 grid + // alternates at each level between a 8x4 and a 4x8 grid return EARTH_POLAR_DISTANCE / (1L<<((((level+1)/2)*2) + ((level/2)*3))); } - + /** - * Calculate the height (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the height of cells at level in meters + * Calculate the height (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the height of cells at level in meters */ public static double quadTreeCellHeight(int level) { assert level>=0; return EARTH_POLAR_DISTANCE / (1L<<level); } /** - * Calculate the size (in meters) of geohash cells at a specific level - * @param level geohash level must be greater or equal to zero - * @return the size of cells at level in meters + * Calculate the size (in meters) of geohash cells at a specific level + * @param level geohash level must be greater or equal to zero + * @return the size of cells at level in meters */ public static double geoHashCellSize(int level) { assert level>=0; @@ -163,20 +162,20 @@ public class GeoUtils { } /** - * Calculate the size (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the size of cells at level in meters + * Calculate the size (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the size of cells at level in meters */ public static double quadTreeCellSize(int level) { assert level>=0; return Math.sqrt(EARTH_POLAR_DISTANCE*EARTH_POLAR_DISTANCE + EARTH_EQUATOR*EARTH_EQUATOR) / (1L<<level); } /** * Calculate the number of levels needed for a specific precision. QuadTree * cells will not exceed the specified size (diagonal) of the precision. * @param meters Maximum size of cells in meters (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int quadTreeLevelsForPrecision(double meters) { assert meters >= 0; @@ -195,7 +194,7 @@ * Calculate the number of levels needed for a specific precision. QuadTree * cells will not exceed the specified size (diagonal) of the precision. * @param distance Maximum size of cells as unit string (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int quadTreeLevelsForPrecision(String distance) { return quadTreeLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT)); } @@ -205,11 +204,11 @@ * Calculate the number of levels needed for a specific precision. GeoHash * cells will not exceed the specified size (diagonal) of the precision. * @param meters Maximum size of cells in meters (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int geoHashLevelsForPrecision(double meters) { assert meters >= 0; - + if(meters == 0) { return GeohashPrefixTree.getMaxLevelsPossible(); } else { @@ -219,19 +218,19 @@ if(part == 1) return 1; final int bits = (int)Math.round(Math.ceil(Math.log(part) / Math.log(2))); - final int full = bits / 5; // number of 5 bit subdivisions + final int full = bits / 5; // number of 5 bit subdivisions final int left = bits - full*5; // bit representing the last level final int even = full + (left>0?1:0); // number of even levels final int odd = full + (left>3?1:0); // number of odd levels return even+odd; } } - + /** * Calculate the number of levels needed for a specific precision. GeoHash * cells will not exceed the specified size (diagonal) of the precision.
* @param distance Maximum size of cells as unit string (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int geoHashLevelsForPrecision(String distance) { return geoHashLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT)); @@ -355,7 +354,7 @@ public class GeoUtils { } /** * Parse a {@link GeoPoint} with a {@link XContentParser}: - * + * * @param parser {@link XContentParser} to parse the value from * @return new {@link GeoPoint} parsed from the parse */ @@ -365,14 +364,14 @@ public class GeoUtils { /** * Parse a {@link GeoPoint} with a {@link XContentParser}. A geopoint has one of the following forms: - * + * *
<ul> *     <li>Object: <pre>{"lat": <latitude>, "lon": <longitude>}</pre></li> *     <li>String: <pre>"<latitude>,<longitude>"</pre></li> *     <li>Geohash: <pre>"<geohash>"</pre></li> *     <li>Array: <pre>[<longitude>,<latitude>]</pre></li> * </ul>
- * + * * @param parser {@link XContentParser} to parse the value from * @param point A {@link GeoPoint} that will be reset by the values parsed * @return new {@link GeoPoint} parsed from the parse @@ -381,7 +380,7 @@ public class GeoUtils { double lat = Double.NaN; double lon = Double.NaN; String geohash = null; - + if(parser.currentToken() == Token.START_OBJECT) { while(parser.nextToken() != Token.END_OBJECT) { if(parser.currentToken() == Token.FIELD_NAME) { @@ -433,7 +432,7 @@ public class GeoUtils { } else { return point.reset(lat, lon); } - + } else if(parser.currentToken() == Token.START_ARRAY) { int element = 0; while(parser.nextToken() != Token.END_ARRAY) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java index 64c657c8b6f..42650275b4b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java +++ b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java @@ -20,11 +20,9 @@ package org.elasticsearch.common.geo; import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Rectangle; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.ShapeCollection; -import java.util.Collection; import java.util.List; /** diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index 5f11d12a4bf..eb77ef7a46a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index afcf8990513..71b68207e74 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -21,10 +21,10 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; + import java.io.IOException; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 067cd014c0f..5fc6b58e176 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -20,11 +20,10 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Shape; - +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.ElasticsearchException; 
import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 464d72c8d8c..0bf1ed8fa09 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -19,20 +19,19 @@ package org.elasticsearch.common.geo.builders; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Objects; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineString; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Objects; public class LineStringBuilder extends PointCollection<LineStringBuilder> { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 4703ac19b08..be09ae81836 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -19,14 +19,13 @@ package org.elasticsearch.common.geo.builders; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index a4d236e3557..b0e86a819aa 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -22,11 +22,11 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Point; import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; + import java.io.IOException; import java.util.ArrayList; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 2f9d595c9cb..cff06dbfe59 100644 ---
a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -19,19 +19,18 @@ package org.elasticsearch.common.geo.builders; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - +import com.spatial4j.core.shape.Shape; +import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import com.spatial4j.core.shape.Shape; -import com.vividsolutions.jts.geom.Coordinate; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Objects; public class MultiPolygonBuilder extends ShapeBuilder { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 35225461658..afb713cb09d 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -19,15 +19,14 @@ package org.elasticsearch.common.geo.builders; -import java.io.IOException; -import java.util.Objects; - +import com.spatial4j.core.shape.Point; +import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import com.spatial4j.core.shape.Point; -import com.vividsolutions.jts.geom.Coordinate; +import java.io.IOException; +import java.util.Objects; public class PointBuilder extends ShapeBuilder { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java index 45ce5adb595..b48aacd857b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java @@ -19,15 +19,14 @@ package org.elasticsearch.common.geo.builders; +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.common.xcontent.XContentBuilder; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import com.vividsolutions.jts.geom.Coordinate; - /** * The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points. 
*/ diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 03ff6a6b892..026fc9aa170 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -27,7 +27,6 @@ import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LinearRing; import com.vividsolutions.jts.geom.MultiPolygon; import com.vividsolutions.jts.geom.Polygon; - import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -40,10 +39,10 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.List; import java.util.Locale; import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; /** * The {@link PolygonBuilder} implements the groundwork to create polygons. This contains diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index fcd8177ac6c..d286237e547 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -26,7 +26,6 @@ import com.spatial4j.core.shape.jts.JtsGeometry; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.io.stream.NamedWriteable; @@ -42,7 +41,11 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc diff --git a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java index 01079ecf45a..b99ef895430 100644 --- a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java +++ b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java @@ -30,7 +30,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.unit.TimeValue; -import java.io.*; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintWriter; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; diff --git a/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index 154ce88b245..8b501a56129 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -17,7 +17,17 @@ package org.elasticsearch.common.inject; import 
org.elasticsearch.common.inject.internal.Errors; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.spi.Element; +import org.elasticsearch.common.inject.spi.ElementVisitor; +import org.elasticsearch.common.inject.spi.InjectionRequest; +import org.elasticsearch.common.inject.spi.MembersInjectorLookup; +import org.elasticsearch.common.inject.spi.Message; +import org.elasticsearch.common.inject.spi.PrivateElements; +import org.elasticsearch.common.inject.spi.ProviderLookup; +import org.elasticsearch.common.inject.spi.ScopeBinding; +import org.elasticsearch.common.inject.spi.StaticInjectionRequest; +import org.elasticsearch.common.inject.spi.TypeConverterBinding; +import org.elasticsearch.common.inject.spi.TypeListenerBinding; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java b/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java index d2fb6ae4121..87bf31e911e 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.ToStringBuilder; import org.elasticsearch.common.inject.spi.Dependency; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java b/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java index b57f92e7958..49ada56cefa 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.inject.spi.InjectionPoint; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Modifier; /** * Produces construction proxies that invoke the class constructor. 
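[Note on the pattern in these hunks: every wildcard import, such as `import org.elasticsearch.common.inject.spi.*;`, is expanded into explicit single-type imports, and the import blocks are regrouped (third-party packages, then org.elasticsearch, then java.*, then static imports). Expanding star imports is not purely cosmetic; it also rules out silent name clashes between packages. A minimal illustrative sketch of that failure mode follows; the class is hypothetical and not part of this patch.

// Sketch: with two wildcard imports that both provide a type named List,
// the simple name no longer resolves and compilation fails with
// "error: reference to List is ambiguous".
import java.awt.*;   // provides java.awt.List
import java.util.*;  // provides java.util.List

public class StarImportClash {
    // List items;  // would not compile: ambiguous between java.awt.List and java.util.List

    // Explicit, fully qualified (or singly imported) types, the style this
    // patch converges on, leave no ambiguity:
    private final java.util.List<String> items = java.util.Collections.singletonList("ok");

    public static void main(String[] args) {
        System.out.println(new StarImportClash().items);
    }
}

Keeping one type per import line also means a later addition touches exactly one line, which keeps review diffs like this one small.]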
diff --git a/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java b/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java index 0e001080fbe..efc10b27e49 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.BindingImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.PrivateElements; diff --git a/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java b/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java index b275ea67a82..c4686e074d0 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java +++ b/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java @@ -17,7 +17,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.ToStringBuilder; import org.elasticsearch.common.inject.spi.Dependency; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/Inject.java b/core/src/main/java/org/elasticsearch/common/inject/Inject.java index ff67b645f2b..a79acab89e5 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/Inject.java +++ b/core/src/main/java/org/elasticsearch/common/inject/Inject.java @@ -20,7 +20,9 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; -import static java.lang.annotation.ElementType.*; +import static java.lang.annotation.ElementType.CONSTRUCTOR; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java index 8cffd7e63b4..8739d9182d8 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.spi.Dependency; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java 
b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java index 81ee9cbbe67..32494cc4cd3 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java +++ b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java @@ -19,7 +19,15 @@ package org.elasticsearch.common.inject; import org.elasticsearch.common.inject.internal.MoreTypes; import org.elasticsearch.common.inject.util.Types; -import java.lang.reflect.*; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.GenericArrayType; +import java.lang.reflect.Member; +import java.lang.reflect.Method; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.lang.reflect.TypeVariable; +import java.lang.reflect.WildcardType; import java.util.Arrays; import java.util.List; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java index 32b1d60bc14..f2c8d49a8a3 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java @@ -21,7 +21,9 @@ import org.elasticsearch.common.inject.BindingAnnotation; import java.lang.annotation.Retention; import java.lang.annotation.Target; -import static java.lang.annotation.ElementType.*; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java index e067cc813bd..0fae9dede5b 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject.assistedinject; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.BindingAnnotation; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Provider; import java.lang.annotation.Annotation; import java.lang.reflect.ParameterizedType; diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java b/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java index 4dd499e4328..764e93473dd 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject.internal; -import java.lang.annotation.*; +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * The presence of this annotation on a method parameter indicates that diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index 38456a4d04b..4b3660fe47a 100644 --- 
a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -16,17 +16,41 @@ package org.elasticsearch.common.inject.spi; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.MembersInjector; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.PrivateBinder; +import org.elasticsearch.common.inject.PrivateModule; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.Scope; +import org.elasticsearch.common.inject.Stage; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedConstantBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedElementBuilder; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.AbstractBindingBuilder; +import org.elasticsearch.common.inject.internal.BindingBuilder; +import org.elasticsearch.common.inject.internal.ConstantBindingBuilderImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ExposureBuilder; +import org.elasticsearch.common.inject.internal.PrivateElementsImpl; +import org.elasticsearch.common.inject.internal.ProviderMethodsModule; +import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.matcher.Matcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import java.lang.annotation.Annotation; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** * Exposes elements of a module so they can be inspected, validated or {@link diff --git a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java index e53e7a73eb7..08761f84ff5 100644 --- a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java +++ b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.io; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import java.io.BufferedReader; @@ -30,7 +29,14 @@ import java.io.Reader; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; -import java.nio.file.*; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.DirectoryStream; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; @@ -273,7 +279,7 @@ public final class FileSystemUtils { Files.walkFileTree(source, new TreeCopier(source, destination, true)); } } - + // TODO: note that this will fail if source and target are on different NIO.2 
filesystems. static class TreeCopier extends SimpleFileVisitor { diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index 36b1d9445b0..f922fde3e75 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.io; -import java.nio.charset.StandardCharsets; - import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.util.Callback; @@ -32,6 +30,7 @@ import java.io.OutputStream; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java b/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java index d29687e2dfa..428828bc0fe 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java +++ b/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.joda; -import java.util.Locale; - import org.joda.time.format.DateTimeFormatter; +import java.util.Locale; + /** * A simple wrapper around {@link DateTimeFormatter} that retains the * format that was used to create it. @@ -34,7 +34,7 @@ public class FormatDateTimeFormatter { private final DateTimeFormatter parser; private final DateTimeFormatter printer; - + private final Locale locale; public FormatDateTimeFormatter(String format, DateTimeFormatter parser, Locale locale) { @@ -47,7 +47,7 @@ public class FormatDateTimeFormatter { this.printer = locale == null ? printer.withDefaultYear(1970) : printer.withLocale(locale).withDefaultYear(1970); this.parser = locale == null ? 
parser.withDefaultYear(1970) : parser.withLocale(locale).withDefaultYear(1970); } - + public String format() { return format; } @@ -59,7 +59,7 @@ public class FormatDateTimeFormatter { public DateTimeFormatter printer() { return this.printer; } - + public Locale locale() { return locale; } diff --git a/core/src/main/java/org/elasticsearch/common/joda/Joda.java b/core/src/main/java/org/elasticsearch/common/joda/Joda.java index 174fe22e15b..34ca5f77da5 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/core/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -20,11 +20,25 @@ package org.elasticsearch.common.joda; import org.elasticsearch.common.Strings; -import org.joda.time.*; +import org.joda.time.Chronology; +import org.joda.time.DateTime; +import org.joda.time.DateTimeField; +import org.joda.time.DateTimeFieldType; +import org.joda.time.DateTimeZone; +import org.joda.time.DurationField; +import org.joda.time.DurationFieldType; +import org.joda.time.ReadablePartial; import org.joda.time.field.DividedDateTimeField; import org.joda.time.field.OffsetDateTimeField; import org.joda.time.field.ScaledDurationField; -import org.joda.time.format.*; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.DateTimeFormatterBuilder; +import org.joda.time.format.DateTimeParser; +import org.joda.time.format.DateTimeParserBucket; +import org.joda.time.format.DateTimePrinter; +import org.joda.time.format.ISODateTimeFormat; +import org.joda.time.format.StrictISODateTimeFormat; import java.io.IOException; import java.io.Writer; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 16a9796d8b6..91eb7b86d57 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -24,8 +24,34 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexFormatTooNewException; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TimeLimitingCollector; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.search.TwoPhaseIterator; +import 
org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -49,7 +75,12 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; /** * @@ -593,7 +624,7 @@ public class Lucene { /** * Returns true iff the given exception or - * one of it's causes is an instance of {@link CorruptIndexException}, + * one of its causes is an instance of {@link CorruptIndexException}, * {@link IndexFormatTooOldException}, or {@link IndexFormatTooNewException} otherwise false. */ public static boolean isCorruptionException(Throwable t) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java b/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java index e9c58a78a58..e1fc0171bb7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java @@ -20,7 +20,11 @@ package org.elasticsearch.common.lucene; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreCachingWrappingScorer; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; import java.io.IOException; @@ -65,4 +69,4 @@ public class MinimumScoreCollector extends SimpleCollector { public boolean needsScores() { return true; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index c405de129a5..b1271e7338d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -23,7 +23,13 @@ import org.apache.lucene.index.LeafReader; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; -import java.util.*; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Map; +import java.util.Set; /** * A map between segment core cache keys and the shard that these segments diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java index b394b50683e..c4543435917 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java @@ -18,7 +18,10 @@ */ package org.elasticsearch.common.lucene.index; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.index.shard.ShardId; diff --git
a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java index aff0fa69f09..1094a7b1840 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java @@ -18,7 +18,8 @@ */ package org.elasticsearch.common.lucene.index; -import org.apache.lucene.index.*; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.LeafReader; import org.elasticsearch.index.shard.ShardId; /** diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java index 836c7fdecce..91627361ece 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index 8b1dcd9dfcf..4e2aa5e7eef 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -22,7 +22,11 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; @@ -35,7 +39,11 @@ import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; -import java.util.*; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; /** * diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 662c3294151..d1efdc3ede2 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -20,7 +20,11 @@ package org.elasticsearch.common.lucene.search; import com.carrotsearch.hppc.ObjectHashSet; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; @@ -29,7 +33,11 @@ import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; -import 
java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; public class MultiPhrasePrefixQuery extends Query { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index b7f534d2124..5ecd22eab1b 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -20,8 +20,14 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index 53159660089..c223ee46a2f 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -38,8 +38,20 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.document.Document; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; @@ -50,7 +62,12 @@ import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** @@ -845,7 +862,7 @@ public final class XMoreLikeThis { while(docs != null && docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { freq += docs.freq(); } - + // increment frequency Int cnt = termFreqMap.get(term); if (cnt == null) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 3da5ae0e4ab..69cf2bcf684 100644 --- 
a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -35,7 +35,12 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; /** * A query that allows for a pluggable boost function / filter. If it matches diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 972fb794fb5..b94da9d8b70 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -19,10 +19,14 @@ package org.elasticsearch.common.lucene.search.function; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java index 0cf8a520094..60b8e2086de 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.lucene.uid; * under the License. */ -import java.io.IOException; - import org.apache.lucene.index.Fields; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -36,6 +34,8 @@ import org.elasticsearch.common.lucene.uid.Versions.DocIdAndVersion; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; +import java.io.IOException; + /** Utility class to do efficient primary-key (only 1 doc contains the * given term) lookups by segment, re-using the enums. 
This class is diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java index a7993384267..a0cf923c5f1 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.lucene.uid; -import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader.CoreClosedListener; diff --git a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java b/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java index eefaaf22e7a..b2fca5f6605 100644 --- a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java +++ b/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java @@ -19,13 +19,12 @@ package org.elasticsearch.common.metrics; -import java.util.concurrent.atomic.LongAdder; - import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; /** * An exponentially-weighted moving average. - * + * *
* Taken from codahale metric module, changed to use LongAdder * diff --git a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java b/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java index 8f7b46c355f..3a24df0208e 100644 --- a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java +++ b/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java @@ -21,10 +21,10 @@ package org.elasticsearch.common.metrics; import org.elasticsearch.common.util.concurrent.FutureUtils; -import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; /** * A meter metric which measures mean throughput and one-, five-, and diff --git a/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java b/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java index 8b687b1fcec..324db75dc90 100644 --- a/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java +++ b/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java @@ -22,7 +22,15 @@ package org.elasticsearch.common.netty; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; +import org.jboss.netty.channel.ChannelHandler; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelState; +import org.jboss.netty.channel.ChannelStateEvent; +import org.jboss.netty.channel.ChannelUpstreamHandler; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 13743cabcf6..2c599559920 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -22,7 +22,11 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index ac9631d29b1..0e1dcf5a605 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -24,7 +24,15 @@ import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; -import org.elasticsearch.cluster.routing.allocation.decider.*; +import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; +import 
org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoverySettings; @@ -39,7 +47,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** * Encapsulates all valid cluster level settings. diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 236df5c567b..4e9e9f9428b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -29,7 +29,9 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/common/text/Text.java b/core/src/main/java/org/elasticsearch/common/text/Text.java index d5b02f559f5..6b3b18c443c 100644 --- a/core/src/main/java/org/elasticsearch/common/text/Text.java +++ b/core/src/main/java/org/elasticsearch/common/text/Text.java @@ -18,10 +18,11 @@ */ package org.elasticsearch.common.text; -import java.nio.charset.StandardCharsets; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import java.nio.charset.StandardCharsets; + /** * Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if * the other is requested, caches the other one in a local reference so no additional conversion will be needed.
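[Note: the Text javadoc in the hunk above describes a small but useful pattern: hold whichever representation the object was created with, then materialize and cache the other on first request so each conversion happens at most once per instance. A self-contained sketch of that idea follows; LazyText is hypothetical, with byte[] standing in for BytesReference, and is not the actual org.elasticsearch.common.text.Text code.

import java.nio.charset.StandardCharsets;

// Hypothetical sketch of the lazy dual-representation pattern: one field is
// set at construction, the other is computed and cached on first access.
public final class LazyText {
    private String string; // exactly one of the two fields is non-null at construction
    private byte[] bytes;

    public LazyText(String string) { this.string = string; }
    public LazyText(byte[] bytes) { this.bytes = bytes; }

    public String string() {
        if (string == null) {
            string = new String(bytes, StandardCharsets.UTF_8); // convert once, then cached
        }
        return string;
    }

    public byte[] bytes() {
        if (bytes == null) {
            bytes = string.getBytes(StandardCharsets.UTF_8); // convert once, then cached
        }
        return bytes;
    }

    public static void main(String[] args) {
        LazyText t = new LazyText("über");
        System.out.println(t.bytes().length); // 5: UTF-8 conversion happens here and is cached
        System.out.println(t.string());       // no conversion: the original reference is returned
    }
}

The pattern trades one extra field per instance for never paying the same conversion twice, which matters for values that are rendered both as strings and as wire bytes.]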
diff --git a/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java index e3efa20af18..a5c6171392c 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java @@ -21,8 +21,6 @@ package org.elasticsearch.common.transport; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.transport.local.LocalTransport; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java index 910b1fc6af2..b5ccda41d15 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.transport; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; diff --git a/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java index 686ad522446..6a99e06ac01 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java @@ -19,11 +19,11 @@ package org.elasticsearch.common.unit; -import java.util.Objects; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Objects; + import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; /** Utility methods to get memory sizes. 
*/ diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index fb44c7dc9a5..7a542e51022 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -32,7 +32,6 @@ import org.joda.time.format.PeriodFormat; import org.joda.time.format.PeriodFormatter; import java.io.IOException; -import java.io.Serializable; import java.util.Locale; import java.util.Objects; import java.util.concurrent.TimeUnit; diff --git a/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 5a2e21e5e11..ee29df51b55 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.hppc.BitMixer; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; diff --git a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java index a605d66e80d..d45afead715 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java +++ b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index a36c37b22e9..739677342f7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -23,9 +23,22 @@ import com.carrotsearch.hppc.DoubleArrayList; import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.ObjectArrayList; -import org.apache.lucene.util.*; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefArray; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.InPlaceMergeSorter; +import org.apache.lucene.util.IntroSorter; -import java.util.*; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; +import java.util.RandomAccess; /** Collections-related utility methods. 
*/ public class CollectionUtils { diff --git a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java index d25113a54bb..17ae43b1449 100644 --- a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java +++ b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java @@ -24,7 +24,11 @@ import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** * This class defines an official elasticsearch extension point. It registers diff --git a/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java b/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java index 63b7b23a62b..4095f5d7014 100644 --- a/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java +++ b/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasables; import java.util.Iterator; diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java index 2cbc8cbdf99..8d049003824 100644 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java @@ -42,9 +42,19 @@ import org.elasticsearch.index.shard.ShardStateMetaData; import java.io.IOException; import java.io.PrintStream; import java.nio.charset.StandardCharsets; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** */ diff --git a/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java b/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java index 3d7f00747c1..f3d710dab8c 100644 --- a/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java +++ b/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import org.elasticsearch.common.unit.TimeValue; -import java.io.IOException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java index 6d2216d9daa..5fcb1a8f152 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java @@ -23,7 +23,11 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.transport.Transports; import java.util.Objects; -import java.util.concurrent.*; +import java.util.concurrent.CancellationException; 
+import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.AbstractQueuedSynchronizer; public abstract class BaseFuture implements Future { diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 46fd7507f8b..140f026c357 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -22,7 +22,12 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; -import java.util.concurrent.*; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java index 09d644e664f..50d6df9a6a7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.unit.TimeValue; /** * @@ -69,4 +68,4 @@ public abstract class PrioritizedRunnable implements Runnable, Comparable implements BiFunction clazz) { if (!coeerce) { - //Need to throw type IllegalArgumentException as current catch logic in + //Need to throw type IllegalArgumentException as current catch logic in //NumberFieldMapper.parseCreateField relies on this for "malformed" value detection throw new IllegalArgumentException(clazz.getSimpleName() + " value passed as String"); } } - + // The 3rd party parsers we rely on are known to silently truncate fractions: see // http://fasterxml.github.io/jackson-core/javadoc/2.3.0/com/fasterxml/jackson/core/JsonParser.html#getShortValue() // If this behaviour is flagged as undesirable and any truncation occurs @@ -120,7 +124,7 @@ public abstract class AbstractXContentParser implements XContentParser { return intValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public int intValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -130,7 +134,7 @@ public abstract class AbstractXContentParser implements XContentParser { } int result = doIntValue(); ensureNumberConversion(coerce, result, Integer.class); - return result; + return result; } protected abstract int doIntValue() throws IOException; @@ -139,7 +143,7 @@ public abstract class AbstractXContentParser implements XContentParser { public long longValue() throws IOException { return longValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public long longValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -149,7 +153,7 @@ public abstract class AbstractXContentParser implements XContentParser { } long result = doLongValue(); ensureNumberConversion(coerce, result, Long.class); - return result; + return result; } protected abstract long 
doLongValue() throws IOException; @@ -158,7 +162,7 @@ public abstract class AbstractXContentParser implements XContentParser { public float floatValue() throws IOException { return floatValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public float floatValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -171,7 +175,7 @@ public abstract class AbstractXContentParser implements XContentParser { protected abstract float doFloatValue() throws IOException; - + @Override public double doubleValue() throws IOException { return doubleValue(DEFAULT_NUMBER_COEERCE_POLICY); diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index c24ddb7f296..994df37de30 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -24,9 +24,16 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentGenerator; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; -import java.io.*; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Reader; /** * A YAML based content implementation using Jackson. diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index 947bf6099f8..4a3771c8e5a 100644 --- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -20,7 +20,13 @@ package org.elasticsearch.discovery.local; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.IncompatibleClusterStateVersionException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeService; @@ -35,7 +41,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.discovery.*; +import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler; +import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; +import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.DiscoveryStats; +import org.elasticsearch.discovery.InitialStateDiscoveryListener; import org.elasticsearch.node.service.NodeService; import 
org.elasticsearch.transport.TransportService; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 9cec672ad43..19a2cf06bf4 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -36,7 +36,11 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 8849a849f97..6398f31a8fd 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -22,7 +22,12 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -58,10 +63,19 @@ import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 8842bafb116..73be1d3bb28 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import 
org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; @@ -31,9 +32,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java index 2abe730b1e8..9386ff6356e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java @@ -28,7 +28,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.ConcurrentMap; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java index 5a96addc842..04af8207c37 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.TimeUnit; diff --git 
a/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java index 18f734f7136..f97b31861ec 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.zen.ping; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 91fd622023f..acb5f640db0 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -21,7 +21,11 @@ package org.elasticsearch.discovery.zen.publish; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.IncompatibleClusterStateVersionException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.bytes.BytesReference; @@ -40,10 +44,22 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BytesTransportRequest; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/env/ShardLock.java b/core/src/main/java/org/elasticsearch/env/ShardLock.java index 13561f89ab0..4ff1237ba20 100644 --- a/core/src/main/java/org/elasticsearch/env/ShardLock.java +++ b/core/src/main/java/org/elasticsearch/env/ShardLock.java @@ -19,13 +19,10 @@ package org.elasticsearch.env; -import org.apache.lucene.store.Lock; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.index.shard.ShardId; import java.io.Closeable; import java.io.IOException; -import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; /** diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index 4c72894969e..4524222d5a3 100644 --- 
a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index e89cd6c8577..bbb2670e194 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -24,7 +24,11 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index 0850064f320..acd650bc6f7 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingService; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 5e410fb6d53..80e3be78093 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -20,7 +20,11 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 9ac1768522c..5c7b31de23a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -22,19 +22,31 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import 
org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.OutputStreamIndexOutput; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collection; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 79bfbdac8c2..83eaa791485 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -33,7 +33,13 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; /** diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 539ac924262..27ee0c17dab 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; diff --git a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java index 2eb57f187fa..5c05efcd170 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java @@ -21,7 +21,11 @@ package org.elasticsearch.http.netty; import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; import 
org.elasticsearch.rest.support.RestUtils; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.ChannelHandler; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.http.HttpRequest; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java index 19946c27342..7fcc7b65fba 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java @@ -32,16 +32,36 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.support.RestUtils; import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.channel.*; -import org.jboss.netty.handler.codec.http.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; +import org.jboss.netty.handler.codec.http.Cookie; +import org.jboss.netty.handler.codec.http.CookieDecoder; +import org.jboss.netty.handler.codec.http.CookieEncoder; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpHeaders; +import org.jboss.netty.handler.codec.http.HttpMethod; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; +import org.jboss.netty.handler.codec.http.HttpVersion; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; -import static org.elasticsearch.http.netty.NettyHttpServerTransport.*; -import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.*; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_HEADERS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_METHODS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ENABLED; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_MAX_AGE; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_CREDENTIALS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_ORIGIN; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_MAX_AGE; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ORIGIN; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.USER_AGENT; /** * diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index ebc655ae4a5..899bbdc86e2 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -29,17 +29,35 @@ import 
org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.*; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.NetworkExceptionHelper; +import org.elasticsearch.common.transport.PortsRange; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.http.*; +import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpChannel; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.http.HttpServerAdapter; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.netty.pipelining.HttpPipeliningHandler; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.transport.BindTransportException; import org.jboss.netty.bootstrap.ServerBootstrap; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.FixedReceiveBufferSizePredictorFactory; +import org.jboss.netty.channel.ReceiveBufferSizePredictorFactory; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory; import org.jboss.netty.handler.codec.http.HttpChunkAggregator; @@ -56,7 +74,15 @@ import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.*; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_REUSE_ADDRESS; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; /** @@ -285,7 +311,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent lastException = new AtomicReference<>(); diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java 
b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java index 4bcbf4079c0..c291e591dc1 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java @@ -22,13 +22,16 @@ package org.elasticsearch.http.netty.pipelining; // this file is from netty-http-pipelining, under apache 2.0 license // see github.com/typesafehub/netty-http-pipelining -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.jboss.netty.channel.*; -import org.jboss.netty.handler.codec.http.DefaultHttpRequest; +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelHandler; import org.jboss.netty.handler.codec.http.HttpRequest; -import java.util.*; +import java.util.Comparator; +import java.util.PriorityQueue; +import java.util.Queue; /** * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java index 622a3e6ac9f..2485b7082bd 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java @@ -22,7 +22,11 @@ package org.elasticsearch.http.netty.pipelining; // this file is from netty-http-pipelining, under apache 2.0 license // see github.com/typesafehub/netty-http-pipelining -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.DownstreamMessageEvent; /** * Permits downstream channel events to be ordered and signalled as to whether more are to come for a given sequence. 
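Aside: the pipelining classes touched above enforce HTTP pipelining semantics, where responses may be computed out of order but must be written back in the order their requests arrived, tracked by a per-connection sequence number. A rough standalone sketch of that ordering discipline (plain Java with hypothetical names, not the Netty handler from this patch) could look like:

import java.util.Comparator;
import java.util.PriorityQueue;

// Illustrative only: park out-of-order responses until the next expected
// sequence number arrives, then flush everything that is now in order.
class ResponseOrderer {
    static final class SequencedResponse {
        final int sequence; // position of the originating request on the connection
        final String body;
        SequencedResponse(int sequence, String body) {
            this.sequence = sequence;
            this.body = body;
        }
    }

    // Completed responses that arrived ahead of their turn, ordered by sequence.
    private final PriorityQueue<SequencedResponse> pending =
        new PriorityQueue<>(Comparator.comparingInt((SequencedResponse r) -> r.sequence));
    private int nextToWrite = 0;

    synchronized void onResponse(SequencedResponse response) {
        pending.add(response);
        // Drain the queue for as long as its head is the response the client expects next.
        while (!pending.isEmpty() && pending.peek().sequence == nextToWrite) {
            write(pending.poll());
            nextToWrite++;
        }
    }

    private void write(SequencedResponse r) {
        System.out.println("wrote response " + r.sequence + ": " + r.body);
    }
}

This mirrors HttpPipeliningHandler and OrderedDownstreamChannelEvent only in spirit; the real classes additionally deal with channel events, chunked responses, and signalling whether more events follow for a given sequence.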
diff --git a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 8244f633939..b155a436108 100644 --- a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -22,7 +22,6 @@ package org.elasticsearch.index; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; /** * @@ -50,4 +49,4 @@ public abstract class AbstractIndexComponent implements IndexComponent { public IndexSettings getIndexSettings() { return indexSettings; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index e81a4a7c86b..5363c0675cc 100644 --- a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -24,9 +24,9 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; -import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.ShardId; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 231baaefaab..68e7b2672fb 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -38,7 +38,11 @@ import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.mapper.MapperRegistry; import java.io.IOException; -import java.util.*; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Consumer; diff --git a/core/src/main/java/org/elasticsearch/index/NodeServicesProvider.java b/core/src/main/java/org/elasticsearch/index/NodeServicesProvider.java index aeb57926c04..a6d464cbaad 100644 --- a/core/src/main/java/org/elasticsearch/index/NodeServicesProvider.java +++ b/core/src/main/java/org/elasticsearch/index/NodeServicesProvider.java @@ -28,7 +28,6 @@ import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; -import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index a2c65c6441d..43c9af672d1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -69,7 +69,14 @@ 
import java.io.IOException; import java.io.Reader; import java.nio.charset.StandardCharsets; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static java.util.Collections.unmodifiableMap; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 86c06dbe54f..c833f41457e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -29,7 +29,12 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory; -import org.elasticsearch.indices.analysis.*; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; +import org.elasticsearch.indices.analysis.PreBuiltCharFilters; +import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; +import org.elasticsearch.indices.analysis.PreBuiltTokenizers; import java.io.Closeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 261add4e220..24da3c267c0 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -21,16 +21,13 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.core.StringFieldMapper; import java.io.Closeable; -import java.io.IOException; import java.util.HashMap; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/DelimitedPayloadTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/DelimitedPayloadTokenFilterFactory.java index fb25c8e0bb5..d620e058e3e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/DelimitedPayloadTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/DelimitedPayloadTokenFilterFactory.java @@ -20,7 +20,11 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.payloads.*; +import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; +import org.apache.lucene.analysis.payloads.FloatEncoder; +import org.apache.lucene.analysis.payloads.IdentityEncoder; +import org.apache.lucene.analysis.payloads.IntegerEncoder; +import org.apache.lucene.analysis.payloads.PayloadEncoder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -32,10 +36,10 @@ public class 
DelimitedPayloadTokenFilterFactory extends AbstractTokenFilterFacto public static final char DEFAULT_DELIMITER = '|'; public static final PayloadEncoder DEFAULT_ENCODER = new FloatEncoder(); - + static final String ENCODING = "encoding"; static final String DELIMITER = "delimiter"; - + char delimiter; PayloadEncoder encoder; @@ -48,7 +52,7 @@ public class DelimitedPayloadTokenFilterFactory extends AbstractTokenFilterFacto } else { delimiter = DEFAULT_DELIMITER; } - + if (settings.get(ENCODING) != null) { if (settings.get(ENCODING).equals("float")) { encoder = new FloatEncoder(); @@ -56,7 +60,7 @@ public class DelimitedPayloadTokenFilterFactory extends AbstractTokenFilterFacto encoder = new IntegerEncoder(); } else if (settings.get(ENCODING).equals("identity")) { encoder = new IdentityEncoder(); - } + } } else { encoder = DEFAULT_ENCODER; } @@ -67,5 +71,5 @@ public class DelimitedPayloadTokenFilterFactory extends AbstractTokenFilterFacto DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(tokenStream, delimiter, encoder); return filter; } - + } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java index 4c55a275c18..68e3c3ee450 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java @@ -24,7 +24,6 @@ import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import java.util.AbstractMap; -import java.util.Collection; import java.util.Map; import java.util.stream.Stream; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java index db8ccfec611..a012db3b785 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java index 4a2a97e8892..1f44657a897 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.TypeTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.Arrays; @@ -32,7 +32,7 @@ import java.util.Set; /** * A {@link TokenFilterFactory} for {@link TypeTokenFilter}. This filter only * keep tokens that are contained in the set configured via - * {@value #KEEP_TYPES_KEY} setting. + * {@value #KEEP_TYPES_KEY} setting. *
<p/> * Configuration options: * <ul>
    diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java index 56a62624af9..82b8df70741 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.miscellaneous.KeepWordFilter; import org.apache.lucene.analysis.miscellaneous.Lucene43KeepWordFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java index ebd16cbe2e7..b787ed64090 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java @@ -22,8 +22,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.apache.lucene.analysis.util.CharArraySet; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java index 7262b0ad9da..d926371ca48 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.NumericTokenStream; import java.io.IOException; -import java.io.Reader; /** * @@ -37,4 +36,4 @@ public class NumericLongTokenizer extends NumericTokenizer { protected void setValue(NumericTokenStream tokenStream, String value) { tokenStream.setLongValue(Long.parseLong(value)); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java index 43378411ae4..d5da62f67b1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java @@ -20,8 +20,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.pattern.PatternTokenizer; @@ -53,4 +53,4 @@ public final class PatternAnalyzer extends Analyzer { } return new TokenStreamComponents(tokenizer, stream); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java 
b/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java index d2d6aaeaf59..006fb447368 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java @@ -22,8 +22,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.pattern.PatternCaptureGroupTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java index b2441d547cf..a6d22b2be95 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java @@ -21,9 +21,9 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.pattern.PatternReplaceFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java index 4ce0bee7a2c..996cc93cd20 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java @@ -22,14 +22,14 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.en.EnglishPossessiveFilter; -import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.en.EnglishPossessiveFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; -import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.standard.StandardFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; +import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; @@ -40,7 +40,7 @@ import org.apache.lucene.util.Version; * stemmer is the part of the class name before "Stemmer", e.g., the stemmer in * {@link org.tartarus.snowball.ext.EnglishStemmer} is named "English". - * @deprecated (3.1) Use the language-specific analyzer in modules/analysis instead. + * @deprecated (3.1) Use the language-specific analyzer in modules/analysis instead. 
* This analyzer WAS removed in Lucene 5.0 */ @Deprecated diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java index 3c79abd6fb6..c0d527f1b11 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.util.CharArraySet; import org.elasticsearch.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java index a6cfe91cbf4..156ad1ff07e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java index 11f6a28ec82..66643cc2396 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter; import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter.StemmerOverrideMap; import org.elasticsearch.common.Strings; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.io.IOException; @@ -41,7 +41,7 @@ public class StemmerOverrideTokenFilterFactory extends AbstractTokenFilterFactor if (rules == null) { throw new IllegalArgumentException("stemmer override filter requires either `rules` or `rules_path` to be configured"); } - + StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false); parseRules(rules, builder, "=>"); overrideMap = builder.build(); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java index 7f8b65676bf..1154f9b0f79 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java @@ -57,7 +57,30 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.tartarus.snowball.ext.*; +import 
org.tartarus.snowball.ext.ArmenianStemmer; +import org.tartarus.snowball.ext.BasqueStemmer; +import org.tartarus.snowball.ext.CatalanStemmer; +import org.tartarus.snowball.ext.DanishStemmer; +import org.tartarus.snowball.ext.DutchStemmer; +import org.tartarus.snowball.ext.EnglishStemmer; +import org.tartarus.snowball.ext.FinnishStemmer; +import org.tartarus.snowball.ext.FrenchStemmer; +import org.tartarus.snowball.ext.German2Stemmer; +import org.tartarus.snowball.ext.GermanStemmer; +import org.tartarus.snowball.ext.HungarianStemmer; +import org.tartarus.snowball.ext.IrishStemmer; +import org.tartarus.snowball.ext.ItalianStemmer; +import org.tartarus.snowball.ext.KpStemmer; +import org.tartarus.snowball.ext.LithuanianStemmer; +import org.tartarus.snowball.ext.LovinsStemmer; +import org.tartarus.snowball.ext.NorwegianStemmer; +import org.tartarus.snowball.ext.PorterStemmer; +import org.tartarus.snowball.ext.PortugueseStemmer; +import org.tartarus.snowball.ext.RomanianStemmer; +import org.tartarus.snowball.ext.RussianStemmer; +import org.tartarus.snowball.ext.SpanishStemmer; +import org.tartarus.snowball.ext.SwedishStemmer; +import org.tartarus.snowball.ext.TurkishStemmer; /** */ @@ -138,7 +161,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new FrenchLightStemFilter(tokenStream); } else if ("minimal_french".equalsIgnoreCase(language) || "minimalFrench".equalsIgnoreCase(language)) { return new FrenchMinimalStemFilter(tokenStream); - + // Galician stemmers } else if ("galician".equalsIgnoreCase(language)) { return new GalicianStemFilter(tokenStream); @@ -168,7 +191,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("indonesian".equalsIgnoreCase(language)) { return new IndonesianStemFilter(tokenStream); - + // Irish stemmer } else if ("irish".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new IrishStemmer()); @@ -192,8 +215,8 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new NorwegianLightStemFilter(tokenStream); } else if ("minimal_norwegian".equalsIgnoreCase(language) || "minimalNorwegian".equals(language)) { return new NorwegianMinimalStemFilter(tokenStream); - - // Norwegian (Nynorsk) stemmers + + // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { @@ -223,7 +246,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new SnowballFilter(tokenStream, new SpanishStemmer()); } else if ("light_spanish".equalsIgnoreCase(language) || "lightSpanish".equalsIgnoreCase(language)) { return new SpanishLightStemFilter(tokenStream); - + // Sorani Kurdish stemmer } else if ("sorani".equalsIgnoreCase(language)) { return new SoraniStemFilter(tokenStream); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 1f58c8a3d71..3696462c4f5 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -28,9 +28,9 @@ import org.apache.lucene.analysis.synonym.SolrSynonymParser; import 
org.apache.lucene.analysis.synonym.SynonymFilter; import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index c93bfa34190..f81ac97ba90 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; -import java.io.Reader; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java index 84359268644..0a5a30cc28f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java index f506a6eaab0..eec70134c3f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.UniqueTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java index ffb283e7a23..1d5a9563130 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java @@ -25,17 +25,28 @@ import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import java.util.*; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import 
java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE; public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory { @@ -195,4 +206,4 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory } return new String(out, 0, writePos); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java index d0388205b1b..8d65e008f25 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.DictionaryCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.Lucene43DictionaryCompoundWordTokenFilter; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -42,11 +42,11 @@ public class DictionaryCompoundWordTokenFilterFactory extends AbstractCompoundWo @Override public TokenStream create(TokenStream tokenStream) { if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, + return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } else { return new Lucene43DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java index 841ca7966d9..42a29784acc 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.Lucene43HyphenationCompoundWordTokenFilter; import 
org.apache.lucene.analysis.compound.hyphenation.HyphenationTree; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.xml.sax.InputSource; @@ -61,10 +61,10 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW @Override public TokenStream create(TokenStream tokenStream) { if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } else { - return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java index 947968deab0..62b8d3ba138 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java +++ b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.cache.query; +import org.apache.lucene.search.DocIdSet; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -27,8 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.apache.lucene.search.DocIdSet; - import java.io.IOException; /** diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java index e4d86be1bda..432f81da8a9 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.index.mapper.MapperService; + import java.util.Map; /** diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java index fa5b89505c8..bb79b7a8352 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java +++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java @@ -19,7 +19,11 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.index.*; +import org.apache.lucene.index.ConcurrentMergeScheduler; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.MergeScheduler; +import org.apache.lucene.index.OneMergeHelper; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.metrics.CounterMetric; @@ -144,7 +148,7 @@ class ElasticsearchConcurrentMergeScheduler extends 
ConcurrentMergeScheduler { @Override public MergeScheduler clone() { - // Lucene IW makes a clone internally but since we hold on to this instance + // Lucene IW makes a clone internally but since we hold on to this instance // the clone will just be the identity. return this; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 0504fdfa996..4dccd3c6f5e 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -19,7 +19,18 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SegmentReader; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; @@ -51,7 +62,12 @@ import org.elasticsearch.index.translog.Translog; import java.io.Closeable; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Condition; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java index 3384f78433f..ac95799b3bb 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.engine; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.index.store.Store; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java index a09ad622299..cc82262fd40 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SearcherFactory; -import org.elasticsearch.index.engine.EngineConfig; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index de13eb10977..80d0b5b3006 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -19,8 +19,21 @@ package 
org.elasticsearch.index.engine; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter.IndexReaderWarmer; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LiveIndexWriterConfig; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SearcherFactory; import org.apache.lucene.search.SearcherManager; @@ -61,7 +74,12 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; diff --git a/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java b/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java index 28401496456..31fddbedfb7 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java +++ b/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.engine; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.store.Store; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java index 97750cf0695..e64499e8a3c 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java @@ -19,7 +19,12 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.*; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java index dc0db303239..f14a0a6314b 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.util.Accountable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; diff --git 
a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 80947260442..39cd710c4ad 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -25,10 +25,10 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData; import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; import org.elasticsearch.index.fielddata.plain.GeoPointArrayIndexFieldData; import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java b/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java index 15aa961294c..57b388b89c0 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.index.fielddata.plain.AbstractIndexFieldData; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.index.fielddata.plain.AbstractIndexFieldData; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java index 157c6fda97c..c35d59fc692 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java @@ -19,7 +19,12 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.util.*; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.InPlaceMergeSorter; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.Sorter; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 1789c3537e6..2ae3f950409 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -19,16 +19,16 @@ package org.elasticsearch.index.fielddata.fieldcomparator; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomAccessOrds; import 
org.apache.lucene.index.SortedDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; @@ -86,7 +86,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed); if (indexFieldData instanceof IndexOrdinalsFieldData) { return new FieldComparator.TermOrdValComparator(numHits, null, sortMissingLast) { - + @Override protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues(); @@ -104,7 +104,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat return new ReplaceMissing(selectedValues, missingBytes); } } - + @Override public void setScorer(Scorer scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); @@ -148,10 +148,10 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat }; } - - /** - * A view of a SortedDocValues where missing values - * are replaced with the specified term + + /** + * A view of a SortedDocValues where missing values + * are replaced with the specified term */ // TODO: move this out if we need it for other reasons static class ReplaceMissing extends SortedDocValues { @@ -159,7 +159,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat final int substituteOrd; final BytesRef substituteTerm; final boolean exists; - + ReplaceMissing(SortedDocValues in, BytesRef term) { this.in = in; this.substituteTerm = term; @@ -204,7 +204,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat return in.lookupOrd(ord); } } - + // we let termsenum etc fall back to the default implementation } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 15961ffedce..fe84f8e8840 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata.fieldcomparator; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.SortField; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java index e6f1d24f1e3..38b36a640cf 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java @@ -27,10 +27,10 @@ import 
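The ReplaceMissing view in the hunk above substitutes a fixed term for documents with no value by splicing that term into the wrapped dictionary's ord space. A hypothetical stand-alone illustration of the ord arithmetic (not the patch's own code): when the substitute term is absent from the wrapped dictionary, every ord at or after its insertion point shifts up by one.

    final class OrdShiftSketch {
        // Map an ord from the wrapped dictionary into the view's dictionary.
        // substituteOrd is where the substitute term sorts; exists says whether
        // the wrapped dictionary already contained that term.
        static int mapOrd(int innerOrd, int substituteOrd, boolean exists) {
            if (exists) {
                return innerOrd;          // dictionary is unchanged
            }
            return innerOrd < substituteOrd
                    ? innerOrd            // sorts before the insertion point
                    : innerOrd + 1;       // shifted by the inserted term
        }
    }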
org.apache.lucene.util.Accountable; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; import org.elasticsearch.indices.breaker.CircuitBreakerService; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index 3b66adfee9a..d17a9fd07f1 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -23,7 +23,13 @@ import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.util.*; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.LongsRef; +import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.packed.GrowableWriter; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PagedGrowableWriter; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java index 0b86b17f211..237f147ebf0 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java @@ -19,16 +19,16 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.MappedFieldType; @@ -88,4 +88,4 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java index 8f0f2798c05..23baeede1cb 100644 --- 
a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java @@ -26,7 +26,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.RamAccountingTermsEnum; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index 3b1629f7882..e4a043852cb 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -26,8 +26,11 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java index 73ae7684f64..36bc0d0a560 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java @@ -18,15 +18,23 @@ */ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilteredTermsEnum; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import 
org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; import org.elasticsearch.index.mapper.MappedFieldType.Names; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java index efe8bc97a30..28fae2c6385 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java index 58a195057bb..859d720d072 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java @@ -21,13 +21,16 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.MultiValueMode; /** * A field data implementation that forbids loading and will throw an {@link IllegalStateException} if you try to load diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java index ce4dc2559e4..c356ff0d2a4 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.GeoUtils; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.util.LongArray; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index e836f13609b..471bea73570 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -19,7 +19,11 @@ package 
org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexSettings; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 2dcffbe12cc..cbf865a202d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -20,7 +20,12 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.codecs.blocktree.FieldReader; import org.apache.lucene.codecs.blocktree.Stats; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PagedBytes; @@ -28,7 +33,12 @@ import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; +import org.elasticsearch.index.fielddata.RamAccountingTermsEnum; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.MappedFieldType; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index b1393542098..eba523e0e2e 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -19,8 +19,14 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.MultiDocValues.OrdinalMap; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; @@ -33,8 +39,12 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import 
org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -45,7 +55,15 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; /** diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java index 0c78d2e5f11..86b08fe1648 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java @@ -19,12 +19,22 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.index.Index; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.FieldData; +import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.NumericDoubleValues; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; @@ -41,7 +51,7 @@ import java.util.Collections; */ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData { private final NumericType numericType; - + public SortedNumericDVIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) { super(index, fieldNames, fieldDataType); if (numericType == null) { @@ -55,7 +65,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple switch (numericType) { case FLOAT: return new FloatValuesComparatorSource(this, missingValue, sortMode, nested); - case DOUBLE: + case DOUBLE: return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested); default: 
assert !numericType.isFloatingPoint(); @@ -67,7 +77,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple public NumericType getNumericType() { return numericType; } - + @Override public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception { return load(context); @@ -77,7 +87,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple public AtomicNumericFieldData load(LeafReaderContext context) { final LeafReader reader = context.reader(); final String field = fieldNames.indexName(); - + switch (numericType) { case FLOAT: return new SortedNumericFloatFieldData(reader, field); @@ -85,19 +95,19 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return new SortedNumericDoubleFieldData(reader, field); default: return new SortedNumericLongFieldData(reader, field); - } + } } - + /** * FieldData implementation for integral types. *
<p>
    - * Order of values within a document is consistent with + * Order of values within a document is consistent with * {@link Long#compareTo(Long)}. *
<p>
    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link DocValues#unwrapSingleton(SortedNumericDocValues)} will return - * the underlying single-valued NumericDocValues representation, and + * the underlying single-valued NumericDocValues representation, and * {@link DocValues#unwrapSingletonBits(SortedNumericDocValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ @@ -125,7 +135,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return Collections.emptyList(); } } - + /** * FieldData implementation for 32-bit float values. *
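The javadoc above leans on DocValues.unwrapSingleton. As a minimal sketch, assuming the Lucene 5.x doc-values API this patch compiles against, the pattern it describes looks like this (unwrapSingletonBits is the companion call for the missing-value bits):

    import java.io.IOException;

    import org.apache.lucene.index.DocValues;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.SortedNumericDocValues;
    import org.apache.lucene.util.Bits;

    final class SingletonUnwrapSketch {
        static void readValues(LeafReader reader, String field) throws IOException {
            SortedNumericDocValues raw = DocValues.getSortedNumeric(reader, field);
            NumericDocValues single = DocValues.unwrapSingleton(raw);
            if (single != null) {
                // Fast path: at most one value per document.
                Bits docsWithValue = DocValues.unwrapSingletonBits(raw);
                // read single.get(docID), guarded by docsWithValue.get(docID)
            } else {
                // General path: raw.setDocument(docID), then raw.count()/raw.valueAt(i).
            }
        }
    }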
<p>
    @@ -134,17 +144,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple * transformation is applied at both index and search: * {@code bits ^ (bits >> 31) & 0x7fffffff} *
<p>
    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link FieldData#unwrapSingleton(SortedNumericDoubleValues)} will return - * the underlying single-valued NumericDoubleValues representation, and + * the underlying single-valued NumericDoubleValues representation, and * {@link FieldData#unwrapSingletonBits(SortedNumericDoubleValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ static final class SortedNumericFloatFieldData extends AtomicDoubleFieldData { final LeafReader reader; final String field; - + SortedNumericFloatFieldData(LeafReader reader, String field) { super(0L); this.reader = reader; @@ -155,7 +165,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple public SortedNumericDoubleValues getDoubleValues() { try { SortedNumericDocValues raw = DocValues.getSortedNumeric(reader, field); - + NumericDocValues single = DocValues.unwrapSingleton(raw); if (single != null) { return FieldData.singleton(new SingleFloatValues(single), DocValues.unwrapSingletonBits(raw)); @@ -166,19 +176,19 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple throw new IllegalStateException("Cannot load doc values", e); } } - + @Override public Collection getChildResources() { return Collections.emptyList(); } } - - /** + + /** * Wraps a NumericDocValues and exposes a single 32-bit float per document. */ static final class SingleFloatValues extends NumericDoubleValues { final NumericDocValues in; - + SingleFloatValues(NumericDocValues in) { this.in = in; } @@ -188,17 +198,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return NumericUtils.sortableIntToFloat((int) in.get(docID)); } } - - /** + + /** * Wraps a SortedNumericDocValues and exposes multiple 32-bit floats per document. */ static final class MultiFloatValues extends SortedNumericDoubleValues { final SortedNumericDocValues in; - + MultiFloatValues(SortedNumericDocValues in) { this.in = in; } - + @Override public void setDocument(int doc) { in.setDocument(doc); @@ -214,7 +224,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return in.count(); } } - + /** * FieldData implementation for 64-bit double values. *
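The transform quoted in these javadocs, bits ^ (bits >> 31) & 0x7fffffff, makes IEEE 754 bit patterns order the same way as the floats they encode: non-negative patterns already compare correctly as signed ints, while negative patterns compare in reverse, so their low 31 bits get flipped. A pure-Java sketch of the idea; the 64-bit double variant in the next hunk is identical with >> 63 and 0x7fffffffffffffffL:

    final class SortableBitsSketch {
        // Monotonic float -> int mapping; applying it twice restores the bits.
        static int floatToSortableInt(float value) {
            int bits = Float.floatToIntBits(value);
            return bits ^ ((bits >> 31) & 0x7fffffff); // flips low 31 bits of negatives only
        }

        public static void main(String[] args) {
            assert floatToSortableInt(-2f) < floatToSortableInt(-1.5f);
            assert floatToSortableInt(-1.5f) < floatToSortableInt(0f);
            assert floatToSortableInt(0f) < floatToSortableInt(2f);
        }
    }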
<p>
    @@ -223,17 +233,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple * transformation is applied at both index and search: * {@code bits ^ (bits >> 63) & 0x7fffffffffffffffL} *
<p>
    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link FieldData#unwrapSingleton(SortedNumericDoubleValues)} will return - * the underlying single-valued NumericDoubleValues representation, and + * the underlying single-valued NumericDoubleValues representation, and * {@link FieldData#unwrapSingletonBits(SortedNumericDoubleValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ static final class SortedNumericDoubleFieldData extends AtomicDoubleFieldData { final LeafReader reader; final String field; - + SortedNumericDoubleFieldData(LeafReader reader, String field) { super(0L); this.reader = reader; @@ -249,7 +259,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple throw new IllegalStateException("Cannot load doc values", e); } } - + @Override public Collection getChildResources() { return Collections.emptyList(); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index fcbbe884bd4..c5093435495 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -23,13 +23,17 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; import org.elasticsearch.index.mapper.MappedFieldType.Names; -import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 899da8f3738..7f6c9517f58 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index 3d5a01c41d1..d628ee4e961 100644 --- 
a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index aa933b4ad3e..e14d7a0cd63 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -29,7 +29,6 @@ import java.util.AbstractMap; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; -import java.util.Map; import java.util.Set; /** diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 333cda459f7..0c813142dcd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 8951ecf0f4e..29e3fa4e456 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -37,7 +37,9 @@ import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; -import java.util.*; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.index.mapper.MapperBuilders.doc; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 30df3562aec..93de39d0f9e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -44,7 +44,6 @@ import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 32e749992e6..c1383817c79 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -24,7 +24,14 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java index 0df3c06d042..75d2cb43937 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java @@ -20,7 +20,18 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.core.*; +import org.elasticsearch.index.mapper.core.BinaryFieldMapper; +import org.elasticsearch.index.mapper.core.BooleanFieldMapper; +import org.elasticsearch.index.mapper.core.ByteFieldMapper; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.FloatFieldMapper; +import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.ShortFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 37e99e8c90c..bcb7010f753 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -20,14 +20,17 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; @@ -52,13 +55,24 @@ import org.elasticsearch.script.ScriptService; import java.io.Closeable; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import 
java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Function; import java.util.stream.Collectors; -import static java.util.Collections.*; +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static java.util.Collections.unmodifiableMap; +import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; /** @@ -308,7 +322,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // Before 3.0 some metadata mappers are also registered under the root object mapper // So we avoid false positives by deduplicating mappers // given that we check exact equality, this would still catch the case that a mapper - // is defined under the root object + // is defined under the root object Collection uniqueFieldMappers = Collections.newSetFromMap(new IdentityHashMap<>()); uniqueFieldMappers.addAll(fieldMappers); fieldMappers = uniqueFieldMappers; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 0ee311678ef..4fe0eb166c4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.Base64; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; @@ -45,7 +44,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Objects; import static org.elasticsearch.index.mapper.MapperBuilders.binaryField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 44b4cbcd35e..9346ebf6b42 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; + import java.io.IOException; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 69177401db7..f4060dee821 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -38,14 +38,25 @@ import org.elasticsearch.common.xcontent.XContentParser; import 
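A note on the deduplication idiom in the MapperService hunk above: Collections.newSetFromMap(new IdentityHashMap<>()) yields a set that compares members with == rather than equals(), so the same mapper instance registered twice (e.g. a metadata mapper also registered under the root object mapper, per the comment in that hunk) collapses to a single entry, while distinct-but-equal instances survive. A minimal, self-contained sketch of the same idiom:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.IdentityHashMap;
    import java.util.List;
    import java.util.Set;

    final class IdentityDedupSketch {
        static <T> List<T> dedupByIdentity(List<T> items) {
            Set<T> unique = Collections.newSetFromMap(new IdentityHashMap<>());
            unique.addAll(items);            // reference-equal duplicates collapse
            return new ArrayList<>(unique);  // equal-but-distinct instances are kept
        }
    }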
org.elasticsearch.common.xcontent.XContentParser.NumberType; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import static org.elasticsearch.index.mapper.MapperBuilders.completionField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 18995498113..67f6a5e21b3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; + import java.io.IOException; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index 9d9557c41f4..a19079c3db9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; + import java.io.IOException; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index ed537aa7e5f..618114685ae 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -36,7 +36,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import 
org.elasticsearch.index.mapper.internal.AllFieldMapper; import java.io.IOException; @@ -49,7 +53,7 @@ import java.util.List; public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { public static class Defaults { - + public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful public static final int PRECISION_STEP_16_BIT = 8; // 2tpv public static final int PRECISION_STEP_32_BIT = 8; // 4tpv @@ -64,7 +68,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM private Boolean ignoreMalformed; private Boolean coerce; - + public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { super(name, fieldType, fieldType); this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); @@ -89,7 +93,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM } return Defaults.IGNORE_MALFORMED; } - + public T coerce(boolean coerce) { this.coerce = coerce; return builder; @@ -173,7 +177,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM protected Explicit ignoreMalformed; protected Explicit coerce; - + protected NumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index e455959c530..017fd6493c5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; + import java.io.IOException; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 08582c65997..0762c9a6698 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -45,8 +45,8 @@ import java.util.Map; import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.index.mapper.MapperBuilders.stringField; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index f6bd4946eb2..00d39791c9c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import 
org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType.Loading; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 0bbe2fe8f1b..6f413683d63 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -47,7 +47,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import static org.elasticsearch.index.mapper.MapperBuilders.doubleField; @@ -505,4 +504,4 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index b0606f1994f..e82ecf0097a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -33,8 +33,8 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index c529db5183e..15fbd6fd123 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -30,8 +30,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index d8a7c752e6f..598e1d365fd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -55,7 +55,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.elasticsearch.index.mapper.MapperBuilders.ipField; diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index c2d9783fc9f..519ac0f5e24 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -30,12 +30,23 @@ import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.object; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 2fd4e914718..90030d40a05 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -26,11 +26,20 @@ import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; diff --git a/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java b/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java index bcacb3516da..a6f1d1c3291 100644 --- a/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java +++ b/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.merge; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index 
c7349cca3e6..50346c2d36e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.apache.lucene.queries.BoostingQuery; +import org.apache.lucene.search.Query; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 02af9b7675a..8ae990cac65 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -19,7 +19,11 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java index 0c5ede50ba0..de8b7b97b23 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; + import java.io.IOException; /** diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 4f9574f2981..e4dc1bcff94 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -30,7 +30,12 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; /** * A query that will return only documents matching specific ids (and a type). 
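As an aside on the IdsQueryBuilder file touched above: the hunk only expands the java.util.* wildcard into explicit imports, so behavior is unchanged. A minimal usage sketch of the ids query, assuming the Java client API of this era (QueryBuilders.idsQuery plus addIds); the type and id values are made up for illustration.

    import org.elasticsearch.index.query.IdsQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    public class IdsQueryExample {
        public static void main(String[] args) {
            // Build a query matching only documents of type "tweet" whose _id is 1 or 4.
            IdsQueryBuilder query = QueryBuilders.idsQuery("tweet").addIds("1", "4");
            // toString() typically renders the query as JSON (assumption about this version).
            System.out.println(query);
        }
    }
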
diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java index af212e7aedf..31054013a8e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.MoreLikeThisQueryParser.Field; import org.elasticsearch.index.search.MatchQuery; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 16107d4ec97..59e04e40951 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -37,7 +37,12 @@ import org.elasticsearch.index.query.support.QueryParsers; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; /** * A query that parses a query string and runs it. There are two modes that this operates. The first, diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/core/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 1ebf44e23f2..c0fc8b80928 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -18,11 +18,11 @@ */ package org.elasticsearch.index.query; -import java.util.Locale; - import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import java.util.Locale; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *
<ul>
      diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 6f14f15d3f2..353dbd668ac 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -28,8 +28,12 @@ import org.apache.lucene.util.Bits; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.*; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 7627644e750..fcf1a12eba7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -23,7 +23,12 @@ import org.apache.lucene.analysis.CachingTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import java.io.IOException; @@ -299,7 +304,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp // For further reasoning see // https://issues.apache.org/jira/browse/LUCENE-4021 return (Objects.equals(locale.toLanguageTag(), other.locale.toLanguageTag()) - && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) + && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) && Objects.equals(lenient, other.lenient) && Objects.equals(analyzeWildcard, other.analyzeWildcard)); } diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java index 7f64eb3ccf2..841508506c7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java @@ -21,7 +21,8 @@ package org.elasticsearch.index.query; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.*; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import java.io.IOException; import java.util.HashMap; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index 5c1fe0ede88..4fcb7cfeec0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -37,7 +37,11 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; +import org.elasticsearch.index.fielddata.NumericDoubleValues; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 1b4dbaea3e2..4075ae54dbb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -20,13 +20,13 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index d5c260f9616..766911bb747 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -30,7 +30,11 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index d738f3a259d..6822ab3e240 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -22,13 +22,18 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java index df76f14e8d8..52324b9654b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.query.functionscore; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java index 06d6ba8f560..028e9495799 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java @@ -19,10 +19,10 @@ package org.elasticsearch.index.query.functionscore.fieldvaluefactor; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java index 9b062d42c1c..a3772c1ecbe 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java @@ -21,10 
+21,10 @@ package org.elasticsearch.index.query.functionscore.random; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java index d0dbcccb625..b12ba5ad82c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java @@ -21,9 +21,9 @@ package org.elasticsearch.index.query.functionscore.script; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; @@ -96,4 +96,4 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser implements IndexEventListener { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index 80c18ef3d63..c0270e71721 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -37,7 +37,12 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * The source recovery accepts recovery requests from other peer shards and start the recovery process from this diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 4057af00841..1410f499078 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -36,13 +36,14 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.CancellableThreads.Interruptable; import org.elasticsearch.index.engine.RecoveryEngineException; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IllegalIndexShardStateException; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardClosedException; +import 
org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.translog.Translog; @@ -60,7 +61,6 @@ import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.stream.StreamSupport; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 32e644ab7b8..f7e683b8f14 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -45,10 +45,21 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.mapper.MapperException; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IllegalIndexShardStateException; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardClosedException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.FutureTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 45f2f91b0be..4a76d262130 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -19,7 +19,13 @@ package org.elasticsearch.indices.store; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -39,7 +45,13 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import 
org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index d963ea24303..65902b443e9 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -43,8 +43,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -55,7 +55,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 0eed82561a3..a7f931129fe 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -41,15 +41,15 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.indices.IndicesService; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index 861297f3482..bf9a04ed734 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.unit.TimeValue; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import java.util.*; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import 
java.util.Locale; +import java.util.Map; import java.util.concurrent.TimeUnit; /** @@ -94,7 +99,7 @@ public class HotThreads { threadName.equals("Reference Handler")) { return true; } - + for (StackTraceElement frame : threadInfo.getStackTrace()) { String className = frame.getClassName(); String methodName = frame.getMethodName(); diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java index c695e265ac5..276ef537bc8 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java @@ -29,7 +29,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; -import java.lang.management.*; +import java.lang.management.BufferPoolMXBean; +import java.lang.management.ClassLoadingMXBean; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.MemoryUsage; +import java.lang.management.RuntimeMXBean; +import java.lang.management.ThreadMXBean; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 7bede53e7ec..11db520ed7d 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,8 +19,6 @@ package org.elasticsearch.node.internal; -import java.nio.charset.StandardCharsets; - import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Booleans; @@ -35,6 +33,7 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java index 5d581178622..e1488ef8cde 100644 --- a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java +++ b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java @@ -22,7 +22,12 @@ package org.elasticsearch.percolator; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CloseableThreadLocal; diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 8df956f2cea..7e2103fff93 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ 
b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; - import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; @@ -73,7 +72,6 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.profile.Profiler; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index eb33f3832b4..0f0b571ff2d 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -19,7 +19,6 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.IntObjectHashMap; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.memory.ExtendedMemoryIndex; diff --git a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java index 094201c6184..33a8dc813fe 100644 --- a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java +++ b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java @@ -19,10 +19,16 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.FloatArrayList; - import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 1db0ac966d7..1efc151836d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -23,12 +23,9 @@ import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexService; -import java.io.Closeable; import java.util.Collection; import java.util.Collections; -import java.util.List; /** * An extension point allowing to plug in custom functionality. 
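Since the hunk above prunes imports on the Plugin extension point, a minimal sketch of a custom plugin may help, assuming the abstract Plugin base class of this commit with name()/description() methods and an onIndexModule(IndexModule) hook (the retained IndexModule import above suggests such a hook); ExamplePlugin itself is hypothetical.

    import org.elasticsearch.index.IndexModule;
    import org.elasticsearch.plugins.Plugin;

    public class ExamplePlugin extends Plugin {

        @Override
        public String name() {
            return "example-plugin"; // unique plugin name
        }

        @Override
        public String description() {
            return "Illustrates the Plugin extension point.";
        }

        // Per-index hook; assumption: signature as of this commit.
        @Override
        public void onIndexModule(IndexModule indexModule) {
            // register per-index listeners or settings here; no-op in this sketch
        }
    }
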
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 1ebe7813d3c..7cd50409fb6 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -20,7 +20,11 @@ package org.elasticsearch.plugins; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.*; +import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchCorruptionException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; @@ -36,9 +40,23 @@ import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; -import java.nio.file.*; -import java.nio.file.attribute.*; -import java.util.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.GroupPrincipal; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.UserPrincipal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 5ebd43d5026..50938a1916c 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -46,7 +46,6 @@ import java.net.URL; import java.net.URLClassLoader; import java.nio.file.DirectoryStream; import java.nio.file.Files; -import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java b/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java index fe513619d06..8b4e0976e18 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java b/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java index 3f8429f1f15..2574ff3b092 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.rest.RestStatus; diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 1ead50d1584..91600488332 100644 --- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.repositories; import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 2648a183362..e941492e7f0 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -65,8 +65,6 @@ import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; -import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Collections; diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index b15da26e3f6..6d8400f648c 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -33,10 +33,17 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.CorruptStateException; -import java.io.*; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.util.Locale; /** diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 478158282d6..33f9d4e7c30 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -33,7 +33,6 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.nio.file.Path; -import java.nio.file.Paths; /** * Shared file system implementation of the BlobStoreRepository diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 1ae1e575692..294338c0501 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -19,7 +19,11 @@ package org.elasticsearch.rest; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; import org.elasticsearch.common.ParseFieldMatcher; diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index fc944f49460..ba7e54c123f 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -21,10 +21,8 @@ package org.elasticsearch.rest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/core/src/main/java/org/elasticsearch/rest/RestFilter.java b/core/src/main/java/org/elasticsearch/rest/RestFilter.java index dd86c026b75..e3ff44ff1fc 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestFilter.java +++ b/core/src/main/java/org/elasticsearch/rest/RestFilter.java @@ -19,8 +19,6 @@ package org.elasticsearch.rest; -import org.elasticsearch.ElasticsearchException; - import java.io.Closeable; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java index 98d4feca189..badf6f6de58 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java @@ -21,12 +21,15 @@ package org.elasticsearch.rest.action.admin.cluster.health; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.util.Locale; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java index c1a39cc1432..24c4c449410 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java @@ -27,7 +27,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestResponseListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index 910d3dcc833..786891d330d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java index 758ee34505a..36e02ba4599 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteReposito import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.deleteRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java index 9f09081417a..fd347ccd332 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.client.Requests.getRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java index b974a9be0fb..feeeeb77aba 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResp import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.putRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java index 6e3a889f691..c0c7ad5b953 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyReposito import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.verifyRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java index 7d5d2c9d5ff..387728918a6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java @@ -30,7 +30,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index b7b5064c096..fc4432a658f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -29,7 +29,13 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java index b30f6848f30..aa84606b076 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java @@ -28,7 +28,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java index a797a474eb6..ee68c1bbb7a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static 
org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java index 4bdef344d3f..bf9dd4a0119 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.createSnapshotRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java index 7e88817ff7f..66b5a4188c0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRes import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.deleteSnapshotRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java index 21aadd174ec..123798cf995 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.getSnapshotsRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java index 
4dc2eed7794..028285d3064 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotR import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.restoreSnapshotRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java index f22300432a5..b60a740a15d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.snapshotsStatusRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java index 7f72326bf0a..f28ecfe4888 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java @@ -31,7 +31,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.EnumSet; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java index 975c460dda8..b14293ba310 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java @@ -24,7 +24,10 @@ 
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java index 0277a084619..5d9eac430b5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java index 5648abc7f14..f62d6febee5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java @@ -28,7 +28,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java index b9102a0048f..6748cc2509d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static 
org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java index 4a1b0bf938b..aa62ee471dc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java @@ -31,7 +31,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java index 528799cba28..4c774b58645 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.common.Strings.isAllOrWildcard; @@ -89,4 +94,4 @@ public class RestGetIndicesAliasesAction extends BaseRestHandler { }); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java index 93342fbf0ba..fce40123b68 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java @@ -28,7 +28,11 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java index 42ea9cba99c..cc06a14b8de 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java index 940b6c1cefa..091fbc1680d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java index 2d54e360f41..41a272cc8b7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java index 43201592e31..0851fb867b7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java index 8ea4e633bc1..6843f5c5ce2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java @@ -26,7 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java index a03a7f0fe1d..f1f227edfdd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java @@ -25,7 +25,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java index 97a951d2ad2..47c0451adfc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index 0b8ffcf94da..4fe893bd411 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java index 730276c1a2b..d8ef7bace3a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java @@ -27,10 +27,14 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; -import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java index da2130adeaf..86336ccf971 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java @@ -19,14 +19,12 @@ package org.elasticsearch.rest.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import 
org.elasticsearch.cluster.metadata.AliasMetaData; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java index 3cfb6f6da64..48fa60cb4b0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java @@ -34,7 +34,12 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java index 4189d490310..3ceecbfd3a9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.putMappingRequest; @@ -51,7 +54,7 @@ public class RestPutMappingAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_mapping", this); controller.registerHandler(POST, "/{index}/_mapping/{type}", this); controller.registerHandler(POST, "/_mapping/{type}", this); - + //register the same paths, but with plural form _mappings controller.registerHandler(PUT, "/{index}/_mappings/", this); controller.registerHandler(PUT, "/{index}/{type}/_mappings", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java index e81bca30f6a..cb22f81ba46 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java index 9d470c4b051..e46831e81e8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java index 85775d55394..e552b13316a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java index 7356d1b759f..a233c75da58 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; 
import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java index a413b145c73..f27897aa731 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java index bd7e62abf48..1a8ba58306d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices.settings; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java index a776efb63a7..e2dc64cc475 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java @@ -20,15 +20,20 @@ package org.elasticsearch.rest.action.admin.indices.shards; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; -import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import 
org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index 4431ba5f4b3..891afd6b8cf 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java index 2b6ebbc6023..a4c1869609b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplat import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java index a4f8805539d..0838fa887e6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java @@ -23,7 +23,12 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResp import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java index e555cfd0fac..45f8a674dde 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRespo import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java index 0a4592f387f..81bdaf7536b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java index 1d3fae87616..4fe07564031 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java index 67e01017678..26f1186f550 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.warmer.IndexWarmersMetaData; diff --git a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java index 536b73ba2b5..37ce03bac70 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java @@ -31,7 +31,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.POST; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java index 95873b8878c..895211a0979 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java @@ -23,7 +23,12 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import static org.elasticsearch.rest.action.support.RestTable.buildHelpWidths; import static org.elasticsearch.rest.action.support.RestTable.pad; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index 3d2b283714f..b322fef9f30 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java index 938743bf3fa..7acc0763e85 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.cat; import com.carrotsearch.hppc.ObjectIntScatterMap; - import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -35,7 +34,10 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java index cebcf3159ab..337684769f3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java @@ -22,7 +22,12 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java index a881f72104b..486e04cc343 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java @@ -19,8 +19,8 @@ package org.elasticsearch.rest.action.cat; -import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.hppc.ObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongMap; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -29,7 +29,10 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 4dd8033c0bd..aec087523b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; @@ -31,6 +30,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; @@ -38,7 +38,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java index 7c2f4ef61a3..f43c8c019d2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java index 2ac08fd74d4..d67d6bc2d28 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.rest.action.cat; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import 
com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 208b7e6c374..e86132a909e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -53,7 +53,10 @@ import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.monitor.process.ProcessStats; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java index 136997c5c89..b563450f8f6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index b52f8e6fc10..34e05223657 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; @@ -31,7 +30,11 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.plugins.PluginInfo; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import 
org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 6574a01ae28..692d5bebbc9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -36,7 +36,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java index 1ce78e33e3f..834b3d391b1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java @@ -30,7 +30,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index e583ed36274..4336c9db2d4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -21,15 +21,15 @@ package org.elasticsearch.rest.action.delete; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import 
org.elasticsearch.rest.action.support.RestStatusToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index 086446fc53f..0e472bb0bf3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java index 647728b24ab..c314c4325d6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java @@ -28,7 +28,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; @@ -89,4 +95,4 @@ public class RestFieldStatsAction extends BaseRestHandler { } }); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java index 9ed5c4d5fe9..e85eef48574 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import 
org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index c0e45fc6aeb..ff6c04a6d12 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -26,7 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java index d0c1433bb47..f32c07f20f1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java @@ -25,7 +25,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java index 14e4496085b..440312b7cb9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 310ce0a1248..13a93299187 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -21,22 +21,24 @@ package org.elasticsearch.rest.action.index; import 
org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; -import static org.elasticsearch.rest.RestStatus.*; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; /** * diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java index df99979eb6b..42de9b898ae 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java index d9a1d9f4cb4..879ec78d754 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java index 4ee543f5362..052fa42104b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java @@ -27,7 +27,10 @@ import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java index 4d8d678a304..b492e7c513f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; @@ -82,4 +88,4 @@ public class RestDeleteIndexedScriptAction extends BaseRestHandler { static final XContentBuilderString _ID = new XContentBuilderString("_id"); static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java index e45e7cdb625..a4c6784d415 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java index 33145be6a47..ed440c2b9fa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java @@ -27,14 +27,22 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; -import static org.elasticsearch.rest.RestStatus.*; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; +import static org.elasticsearch.rest.RestStatus.CREATED; +import static org.elasticsearch.rest.RestStatus.OK; /** * diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index 5efd2584b38..b2a2905585c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -29,7 +29,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 6fb72931804..eb7e0465902 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -30,7 +30,10 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.search.Scroll; diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index 8bf360dc36c..2841bbe1fe3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -19,9 +19,6 @@ package org.elasticsearch.rest.action.suggest; -import static org.elasticsearch.rest.RestRequest.Method.GET; 
-import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -41,6 +38,10 @@ import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.suggest.Suggest; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; + /** * */ diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java index f1c15c69ba9..a59dc3d47b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java @@ -19,14 +19,11 @@ package org.elasticsearch.rest.action.support; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.support.RestActionListener; -import org.elasticsearch.rest.action.support.RestBuilderListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java index 211635ef98f..9ebdc7b5d07 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java @@ -25,8 +25,6 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; -import java.io.IOException; - /** * An action listener that requires {@link #processResponse(Object)} to be implemented * and will automatically handle failures. 
diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 14935f5f9a5..9c0bb61d748 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -27,8 +27,17 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.index.query.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java index acdfaab737b..b5c9f2bcca8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java @@ -19,15 +19,9 @@ package org.elasticsearch.rest.action.support; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; -import java.io.IOException; - /** * A REST enabled action listener that has a basic onFailure implementation, and requires * sub classes to only implement {@link #buildResponse(Object)}. 
diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 3808e58a527..13b638910ae 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -30,10 +30,17 @@ import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; /** */ diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java index 0d23645afda..1523d299f03 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java @@ -21,7 +21,10 @@ package org.elasticsearch.rest.action.template; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; import org.elasticsearch.script.Template; diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java index 400869fff0e..fe897f9b09f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index f59c329fbc3..24264ca292e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -22,22 +22,16 @@ package org.elasticsearch.rest.action.update; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; 
-import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; diff --git a/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java b/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java index 658131202a0..98e5ab11fb7 100644 --- a/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java +++ b/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java @@ -21,7 +21,11 @@ package org.elasticsearch.script; import org.apache.lucene.search.Scorer; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.search.lookup.*; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.search.lookup.LeafFieldsLookup; +import org.elasticsearch.search.lookup.LeafIndexLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.Map; @@ -82,7 +86,7 @@ public abstract class AbstractSearchScript extends AbstractExecutableScript impl protected final SourceLookup source() { return lookup.source(); } - + /** * Allows to access statistics on terms and fields. 
*/ diff --git a/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java b/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java index b89e8603227..66af94a07b8 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java @@ -27,7 +27,12 @@ import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; public class ScriptParameterParser { @@ -176,7 +181,7 @@ public class ScriptParameterParser { if (value != null) { String coreParameterName = parameter.getPreferredName(); putParameterValue(coreParameterName, value, ScriptType.INLINE); - + } } for (ParseField parameter : fileParameters) { @@ -184,7 +189,7 @@ public class ScriptParameterParser { if (value != null) { String coreParameterName = parameter.getPreferredName().replace(FILE_SUFFIX, ""); putParameterValue(coreParameterName, value, ScriptType.FILE); - + } } for (ParseField parameter : indexedParameters) { @@ -192,7 +197,7 @@ public class ScriptParameterParser { if (value != null) { String coreParameterName = parameter.getPreferredName().replace(INDEXED_SUFFIX, ""); putParameterValue(coreParameterName, value, ScriptType.INDEXED); - + } } } diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 8096213b8fa..93659cf8856 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -20,7 +20,12 @@ package org.elasticsearch.search; -import org.apache.lucene.index.*; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index b84a5804c05..6f16d13bd92 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -21,7 +21,6 @@ package org.elasticsearch.search; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; import org.elasticsearch.search.action.SearchServiceTransportAction; @@ -65,8 +64,11 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; -import 
org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; -import org.elasticsearch.search.aggregations.bucket.terms.*; +import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsParser; +import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; import org.elasticsearch.search.aggregations.metrics.avg.AvgParser; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser; @@ -127,7 +129,6 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParserMapper; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffParser; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator; import org.elasticsearch.search.controller.SearchPhaseController; @@ -385,7 +386,7 @@ public class SearchModule extends AbstractModule { SumBucketPipelineAggregator.registerStreams(); StatsBucketPipelineAggregator.registerStreams(); ExtendedStatsBucketPipelineAggregator.registerStreams(); - PercentilesBucketPipelineAggregator.registerStreams(); + PercentilesBucketPipelineAggregator.registerStreams(); MovAvgPipelineAggregator.registerStreams(); CumulativeSumPipelineAggregator.registerStreams(); BucketScriptPipelineAggregator.registerStreams(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchParseException.java b/core/src/main/java/org/elasticsearch/search/SearchParseException.java index 49f8a9c7cf0..c0a9a370270 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchParseException.java +++ b/core/src/main/java/org/elasticsearch/search/SearchParseException.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.internal.SearchContext; @@ -86,7 +85,7 @@ public class SearchParseException extends SearchContextException { /** * Line number of the location of the error - * + * * @return the line number or -1 if unknown */ public int getLineNumber() { @@ -95,7 +94,7 @@ public class SearchParseException extends SearchContextException { /** * Column number of the location of the error - * + * * @return the column number or -1 if unknown */ public int getColumnNumber() { diff --git a/core/src/main/java/org/elasticsearch/search/SearchPhase.java b/core/src/main/java/org/elasticsearch/search/SearchPhase.java index bdab128fecb..85a75b9738d 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/SearchPhase.java @@ -19,7 +19,6 @@ package org.elasticsearch.search; -import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.search.internal.SearchContext; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 99f9b0ea0d7..6bfd3f08a33 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectSet; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -80,16 +79,29 @@ import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.dfs.DfsPhase; import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.fetch.*; +import org.elasticsearch.search.fetch.FetchPhase; +import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.fetch.QueryFetchSearchResult; +import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; +import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField; import org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.internal.*; +import org.elasticsearch.search.internal.DefaultSearchContext; +import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.search.internal.ScrollContext; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext.Lifetime; +import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.Profilers; -import org.elasticsearch.search.query.*; +import org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.query.QuerySearchRequest; +import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.query.QuerySearchResultProvider; +import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java index c648436c3a9..87a2114a788 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.text.Text; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java b/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java index 0e34de359db..6e2bdf932f4 100644 --- 
a/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java +++ b/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java @@ -34,7 +34,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.fetch.*; +import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.fetch.QueryFetchSearchResult; +import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; +import org.elasticsearch.search.fetch.ShardFetchRequest; +import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; @@ -42,7 +46,11 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java index 0b51a53be81..4f37ec3cf8a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -43,7 +42,7 @@ public abstract class DeferringBucketCollector extends BucketCollector { public void setDeferredCollector(Iterable deferredCollectors) { this.collector = BucketCollector.wrap(deferredCollectors); } - + public final void replay(long... 
selectedBuckets) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java index 0678338fcf7..537a02909a9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java @@ -21,7 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.children; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.search.*; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.Lucene; @@ -41,10 +45,8 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import java.io.IOException; import java.util.Arrays; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Set; // The RecordingPerReaderBucketCollector assumes per segment recording which isn't the case for this // aggregation, for this reason that collector can't be used diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java index 192d624b5e0..48702dab230 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.bucket.filter; import org.apache.lucene.search.MatchAllDocsQuery; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.aggregations.Aggregator; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java index e415a025a4c..f685482636f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation.InternalBucket; import org.elasticsearch.search.aggregations.bucket.BucketStreamContext; import org.elasticsearch.search.aggregations.bucket.BucketStreams; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index 343d335cfa2..ef2915fc8ba 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -20,16 +20,13 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.util.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 109301fdbff..6473b5ae7f2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.GeoHashUtils; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.MultiGeoPointValues; @@ -100,7 +99,7 @@ public class GeoHashGridParser implements Aggregator.Parser { } if (shardSize < 0) { - //Use default heuristic to avoid any wrong-ranking caused by distributed counting + //Use default heuristic to avoid any wrong-ranking caused by distributed counting shardSize = BucketUtils.suggestShardSideQueueSize(requiredSize, context.numberOfShards()); } @@ -208,4 +207,4 @@ public class GeoHashGridParser implements Aggregator.Parser { } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java index 75d089ebbc8..e4c3fa2a521 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java @@ -18,14 +18,13 @@ */ package org.elasticsearch.search.aggregations.bucket.geogrid; -import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.GeoHashUtils; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 4010858f259..1b9363caa57 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.bucket.nested; import com.carrotsearch.hppc.LongIntHashMap; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; @@ -79,8 +78,8 @@ public class ReverseNestedAggregator extends SingleBucketAggregator { // fast forward to retrieve the parentDoc this childDoc belongs to final int parentDoc = parentDocs.nextSetBit(childDoc); assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS; - - int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket); + + int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket); if (bucketOrdToLastCollectedParentDoc.indexExists(keySlot)) { int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.indexGet(keySlot); if (parentDoc > lastCollectedParentDoc) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java index a50c1c109f3..e20d1ac931f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java @@ -32,8 +32,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.index.mapper.ip.IpFieldMapper.MAX_IP; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java index 8b0862fed29..dc1e2a65d37 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.range.ipv4; import org.elasticsearch.common.network.Cidrs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 00b10ee63c3..8cb980954cb 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -22,7 +22,13 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.search.aggregations.*; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.BestDocsDeferringCollector; import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator; @@ -39,7 +45,7 @@ import java.util.Map; /** * Aggregate on only the top-scoring docs on a shard. - * + * * TODO currently the diversity feature of this agg offers only 'script' and * 'field' as a means of generating a de-dup value. In future it would be nice * if users could use any of the "bucket" aggs syntax (geo, date histogram...) @@ -131,8 +137,8 @@ public class SamplerAggregator extends SingleBucketAggregator { public String toString() { return parseField.getPreferredName(); } - } - + } + protected final int shardSize; protected BestDocsDeferringCollector bdd; @@ -213,7 +219,7 @@ public class SamplerAggregator extends SingleBucketAggregator { return new DiversifiedNumericSamplerAggregator(name, shardSize, factories, context, parent, pipelineAggregators, metaData, (Numeric) valuesSource, maxDocsPerValue); } - + if (valuesSource instanceof ValuesSource.Bytes) { ExecutionMode execution = null; if (executionHint != null) { @@ -231,7 +237,7 @@ public class SamplerAggregator extends SingleBucketAggregator { return execution.create(name, factories, shardSize, maxDocsPerValue, valuesSource, context, parent, pipelineAggregators, metaData); } - + throw new AggregationExecutionException("Sampler aggregation cannot be applied to field [" + config.fieldContext().field() + "]. 
It can only be applied to numeric or string fields."); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 536c3945e8c..399e85728af 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -30,7 +30,11 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lucene.index.FilterableTermsEnum; import org.elasticsearch.common.lucene.index.FreqTermsEnum; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.aggregations.*; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; @@ -198,7 +202,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac pipelineAggregators, metaData); } - + if ((includeExclude != null) && (includeExclude.isRegexBased())) { throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude " + "settings as they can only be applied to string fields. 
Use an array of numeric values for include/exclude clauses used to filter numeric fields"); @@ -223,12 +227,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac /** * Creates the TermsEnum (if not already created) and must be called before any calls to getBackgroundFrequency - * @param context The aggregation context + * @param context The aggregation context * @return The number of documents in the index (after an optional filter might have been applied) */ public long prepareBackground(AggregationContext context) { if (termsEnum != null) { - // already prepared - return + // already prepared - return return termsEnum.getNumDocs(); } SearchContext searchContext = context.searchContext(); @@ -238,7 +242,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac // Setup a termsEnum for sole use by one aggregator termsEnum = new FilterableTermsEnum(reader, indexedFieldName, PostingsEnum.NONE, filter); } else { - // When we have > 1 agg we have possibility of duplicate term frequency lookups + // When we have > 1 agg we have possibility of duplicate term frequency lookups // and so use a TermsEnum that caches results of all term lookups termsEnum = new FreqTermsEnum(reader, indexedFieldName, true, false, filter, searchContext.bigArrays()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index b6d1d56d07b..9efea000512 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -30,9 +30,13 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardException; -import org.elasticsearch.script.*; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java index ff236460e72..92baa43e6b3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java index 64d2ae659e0..198f129c28a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.aggregations.bucket.significant.heuristics; -import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.io.stream.StreamInput; + import java.io.IOException; import java.util.Collections; import java.util.HashMap; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java index 4f9faae962c..776e49746e1 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java @@ -26,9 +26,9 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -154,7 +154,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { } // replay any deferred collections runDeferredCollections(survivingBucketOrds); - + // Now build the aggs for (int i = 0; i < list.length; i++) { final StringTerms.Bucket bucket = (StringTerms.Bucket)list[i]; @@ -162,7 +162,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { bucket.aggregations = bucketAggregations(bucket.bucketOrd); bucket.docCountError = 0; } - + return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, pipelineAggregators(), metaData()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index f6948649785..a9d6a9334bb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -21,8 +21,12 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.search.aggregations.*; +import 
org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index 98abe2b464a..9c33a987a9b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.terms.support; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; - import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Terms; @@ -124,7 +123,7 @@ public class IncludeExclude { public static abstract class OrdinalsFilter { public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException; - + } static class AutomatonBackedOrdinalsFilter extends OrdinalsFilter { @@ -137,7 +136,7 @@ public class IncludeExclude { /** * Computes which global ordinals are accepted by this IncludeExclude instance. 
- * + * */ @Override public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException { @@ -153,7 +152,7 @@ public class IncludeExclude { } } - + static class TermListBackedOrdinalsFilter extends OrdinalsFilter { private final SortedSet includeValues; @@ -173,7 +172,7 @@ public class IncludeExclude { if (ord >= 0) { acceptedGlobalOrdinals.set(ord); } - } + } } else { // default to all terms being acceptable acceptedGlobalOrdinals.set(0, acceptedGlobalOrdinals.length()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java index 3834a65a3ad..b99db25a655 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.util.GeoHashUtils; import org.apache.lucene.util.GeoUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasables; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java index e264cf694dc..0b300403c9d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; import com.carrotsearch.hppc.DoubleArrayList; - import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java index 4bbb407f788..803123ffc59 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.search.aggregations.metrics.MetricsAggregationBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java index d80b087930c..6fbc6f8c6d6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java @@ -22,12 +22,12 @@ package org.elasticsearch.search.aggregations.pipeline; import 
org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder; +import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder; import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java index 1e5dd46eca6..88a1f42b174 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser; @@ -64,4 +64,4 @@ public abstract class BucketMetricsBuilder> ex protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java index 287fb7b3402..3cf084b239a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java index 683db6c7d68..4cd584a0b03 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java @@ -19,8 +19,8 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java index 92e8b01f43b..24e820481a3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java @@ -31,7 +31,10 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index e5ccbf6971a..76cb15ed46d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -41,7 +41,11 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.StreamSupport; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java index ab077634222..50b4578346d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.aggregations.pipeline.derivative; import org.elasticsearch.common.xcontent.XContentBuilder; 
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index 48686b9a6b9..4f7034b633f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index 55611a0d000..fe0321bf0fc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -29,7 +29,8 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.Collection; +import java.util.Map; /** * Calculate a doubly exponential weighted moving average diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java index 176d4b06f3b..55cf6be073c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -32,7 +32,11 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; /** * Calculate a triple exponential weighted moving average diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java index f1755132072..4bfac9d44cb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models; import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.SearchParseException; import java.io.IOException; import java.text.ParseException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java index 2a8f27d127c..a88730e582f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index 0464dc8c1d8..b03bc8d6833 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -18,7 +18,13 @@ */ package org.elasticsearch.search.aggregations.support; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.Bits; diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 3ea2d604b8a..fe7e6064fc3 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index 835e6e71425..ef16a03831d 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -52,10 +52,10 @@ import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.InternalProfileShardResults; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.profile.ProfileShardResult; import java.io.IOException; import java.util.ArrayList; diff --git 
a/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java b/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java index d8330e5ec15..d762540caab 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; @@ -72,8 +71,8 @@ public class AggregatedDfs implements Streamable { termStatistics = HppcMaps.newMap(size); for (int i = 0; i < size; i++) { Term term = new Term(in.readString(), in.readBytesRef()); - TermStatistics stats = new TermStatistics(in.readBytesRef(), - in.readVLong(), + TermStatistics stats = new TermStatistics(in.readBytesRef(), + in.readVLong(), DfsSearchResult.subOne(in.readVLong())); termStatistics.put(term, stats); } @@ -84,7 +83,7 @@ public class AggregatedDfs implements Streamable { @Override public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(termStatistics.size()); - + for (ObjectObjectCursor c : termStatistics()) { Term term = (Term) c.key; out.writeString(term.field()); diff --git a/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index e1b98c413e1..fab8323d04d 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; diff --git a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index c86af528bfc..6e93e410587 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; @@ -127,12 +126,12 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes } this.termStatistics = readTermStats(in, terms); readFieldStats(in, fieldStatistics); - + maxDoc = in.readVInt(); } - + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -146,7 +145,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes writeFieldStats(out, fieldStatistics); out.writeVInt(maxDoc); } - + public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap fieldStatistics) throws IOException { out.writeVInt(fieldStatistics.size()); @@ -160,20 +159,20 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes out.writeVLong(addOne(statistics.sumDocFreq())); } } - + public static void writeTermStats(StreamOutput out, TermStatistics[] termStatistics) throws IOException { 
out.writeVInt(termStatistics.length); for (TermStatistics termStatistic : termStatistics) { writeSingleTermStats(out, termStatistic); } } - + public static void writeSingleTermStats(StreamOutput out, TermStatistics termStatistic) throws IOException { assert termStatistic.docFreq() >= 0; out.writeVLong(termStatistic.docFreq()); - out.writeVLong(addOne(termStatistic.totalTermFreq())); + out.writeVLong(addOne(termStatistic.totalTermFreq())); } - + public static ObjectObjectHashMap readFieldStats(StreamInput in) throws IOException { return readFieldStats(in, null); } @@ -215,7 +214,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes return termStatistics; } - + /* * optional statistics are set to -1 in lucene by default. * Since we are using var longs to encode values we add one to each value @@ -225,8 +224,8 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes assert value + 1 >= 0; return value + 1; } - - + + /* * See #addOne this just subtracting one and asserts that the actual value * is positive. diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 04890700be8..6addf2dc301 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -55,7 +54,13 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java index 140d60d4eeb..74a37c7e8be 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.internal.SearchContext; /** diff --git a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java index 7bcfc045ca9..f3271f933fe 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchShardTarget; import 
org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; -import org.elasticsearch.transport.TransportResponse; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java index 803ab737cbc..0d524ed3e3e 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java @@ -22,9 +22,7 @@ package org.elasticsearch.search.fetch; import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.search.type.ParsedScrollId; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java index 3d1a2498f2f..4f746c47ec6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.fetch.fielddata; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.FetchSubPhaseContext; import org.elasticsearch.search.fetch.FetchSubPhaseParseElement; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index e1884e36609..371e897951e 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -23,7 +23,19 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopDocsCollector; +import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java index 54366bee8c9..30b8d15d93d 100644 --- 
a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java @@ -22,7 +22,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; /** * An extensions point and registry for all the highlighters a node supports. diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java index ee824ee13c3..eb17daec5f1 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java @@ -27,7 +27,14 @@ import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.S import org.apache.lucene.search.vectorhighlight.FragmentsBuilder; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.Version; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.EdgeNGramTokenFilterFactory; +import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; +import org.elasticsearch.index.analysis.NGramTokenFilterFactory; +import org.elasticsearch.index.analysis.NGramTokenizerFactory; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory; import org.elasticsearch.index.mapper.FieldMapper; import java.util.Comparator; @@ -41,7 +48,7 @@ public final class FragmentBuilderHelper { private FragmentBuilderHelper() { // no instance } - + /** * Fixes problems with broken analysis chains if positions and offsets are messed up that can lead to * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter} @@ -70,7 +77,7 @@ public final class FragmentBuilderHelper { return fragInfo; } } - + private static int compare(int x, int y) { return (x < y) ? -1 : ((x == y) ? 
0 : 1); } @@ -82,19 +89,19 @@ public final class FragmentBuilderHelper { } if (analyzer instanceof CustomAnalyzer) { final CustomAnalyzer a = (CustomAnalyzer) analyzer; - if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory - || (a.tokenizerFactory() instanceof NGramTokenizerFactory + if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory + || (a.tokenizerFactory() instanceof NGramTokenizerFactory && !((NGramTokenizerFactory)a.tokenizerFactory()).version().onOrAfter(Version.LUCENE_4_2))) { // ngram tokenizer is broken before 4.2 return true; } TokenFilterFactory[] tokenFilters = a.tokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { - if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory + if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory || tokenFilterFactory instanceof EdgeNGramTokenFilterFactory) { return true; } - if (tokenFilterFactory instanceof NGramTokenFilterFactory + if (tokenFilterFactory instanceof NGramTokenFilterFactory && !((NGramTokenFilterFactory)tokenFilterFactory).version().onOrAfter(Version.LUCENE_4_2)) { // ngram token filter is broken before 4.2 return true; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java index 71393fdf190..af914cebf6c 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.highlight.vectorhighlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; @@ -29,7 +29,6 @@ import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -67,7 +66,7 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder } return fields; } - + @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java index 7a12b449c97..222f00a5597 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java @@ -20,13 +20,12 @@ package org.elasticsearch.search.highlight.vectorhighlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.LeafReaderContext; 
import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -66,5 +65,5 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { } return fields; } - + } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index a7bacb64d94..c4df30716c7 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -22,7 +22,14 @@ package org.elasticsearch.search.internal; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCache; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.Weight; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 2d3f6590629..8190d5257d9 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -20,7 +20,12 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -35,7 +40,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -57,15 +61,18 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.query.QueryPhaseExecutionException; -import org.elasticsearch.search.profile.Profiler; import org.elasticsearch.search.profile.Profilers; +import 
org.elasticsearch.search.query.QueryPhaseExecutionException; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index fcac5b1cc8b..c6afe325bb3 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; @@ -845,4 +844,4 @@ public class InternalSearchHit implements SearchHit { } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 4e4e9dd5dd7..5c4e70c1004 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -60,7 +60,10 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable { diff --git a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index 07f72379764..ec4b5a041ff 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.lookup; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java b/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java index 490180e6925..21803983d29 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.lookup; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.CollectionStatistics; import org.elasticsearch.common.util.MinimalMap; @@ -93,7 +92,7 @@ public class IndexField extends MinimalMap { /* * Returns a TermInfo object that can be used to access information on * specific terms. flags can be set as described in TermInfo. - * + * * TODO: here might be potential for running time improvement? 
If we knew in * advance which terms are requested, we could provide an array which the * user could then iterate over. diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java index e6052ff5095..27cab01e606 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; /** diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java index 4bc8a85a781..d7afd9c2fa0 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java @@ -21,7 +21,11 @@ package org.elasticsearch.search.profile; import org.apache.lucene.search.Query; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Deque; +import java.util.List; +import java.util.Map; import java.util.concurrent.LinkedBlockingDeque; /** diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java index 6e005babb3c..c472aa791cf 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java @@ -26,7 +26,9 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; /** * A container class to hold the profile results for a single shard in the request. diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profiler.java b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java index bf0c9ec01b6..011b1593d35 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/Profiler.java +++ b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java @@ -21,7 +21,9 @@ package org.elasticsearch.search.profile; import org.apache.lucene.search.Query; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; /** * This class acts as a thread-local storage for profiling a query. 
It also diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 08ff849871f..5352fb02895 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -52,7 +52,10 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.*; +import org.elasticsearch.search.profile.CollectorResult; +import org.elasticsearch.search.profile.InternalProfileCollector; +import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.Profiler; import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.sort.SortParseElement; diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index 2a272f742e3..9fddf590ca4 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -43,7 +43,6 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java index db60d58953a..5ec92264389 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -36,7 +35,12 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * Top level suggest result, containing the result for each suggestion. 
@@ -80,19 +84,19 @@ public class Suggest implements Iterable>> iterator() { return suggestions.iterator(); } - + /** * The number of suggestions in this {@link Suggest} result */ public int size() { return suggestions.size(); } - + public >> T getSuggestion(String name) { if (suggestions.isEmpty() || name == null) { return null; @@ -156,7 +160,7 @@ public class Suggest implements Iterable> group(Map> groupedSuggestions, Suggest suggest) { for (Suggestion> suggestion : suggest) { List list = groupedSuggestions.get(suggestion.getName()); @@ -193,8 +197,8 @@ public class Suggest implements Iterable implements Iterable, Streamable, ToXContent { - - + + public static final int TYPE = 0; protected String name; protected int size; @@ -211,7 +215,7 @@ public class Suggest implements Iterable sortComparator() { return COMPARATOR; } - + /** * Trims the number of options per suggest text term to the requested size. * For internal usage. @@ -293,12 +297,12 @@ public class Suggest implements Iterable comparator) { CollectionUtil.timSort(options, comparator); } @@ -481,7 +485,7 @@ public class Suggest implements Iterable LUCENE_FREQUENCY = new SuggestWordFrequencyComparator(); public static final Comparator SCORE_COMPARATOR = SuggestWordQueue.DEFAULT_COMPARATOR; - + private SuggestUtils() { // utils!! } - + public static DirectSpellChecker getDirectSpellChecker(DirectSpellcheckerSettings suggestion) { - + DirectSpellChecker directSpellChecker = new DirectSpellChecker(); directSpellChecker.setAccuracy(suggestion.accuracy()); Comparator comparator; @@ -79,7 +88,7 @@ public final class SuggestUtils { directSpellChecker.setLowerCaseTerms(false); return directSpellChecker; } - + public static BytesRef join(BytesRef separator, BytesRefBuilder result, BytesRef... toJoin) { result.clear(); for (int i = 0; i < toJoin.length - 1; i++) { @@ -89,40 +98,40 @@ public final class SuggestUtils { result.append(toJoin[toJoin.length-1]); return result.get(); } - + public static abstract class TokenConsumer { protected CharTermAttribute charTermAttr; protected PositionIncrementAttribute posIncAttr; protected OffsetAttribute offsetAttr; - + public void reset(TokenStream stream) { charTermAttr = stream.addAttribute(CharTermAttribute.class); posIncAttr = stream.addAttribute(PositionIncrementAttribute.class); offsetAttr = stream.addAttribute(OffsetAttribute.class); } - + protected BytesRef fillBytesRef(BytesRefBuilder spare) { spare.copyChars(charTermAttr); return spare.get(); } - + public abstract void nextToken() throws IOException; public void end() {} } - + public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, TokenConsumer consumer, CharsRefBuilder spare) throws IOException { spare.copyUTF8Bytes(toAnalyze); return analyze(analyzer, spare.get(), field, consumer); } - + public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { try (TokenStream ts = analyzer.tokenStream( field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { return analyze(ts, consumer); } } - + /** NOTE: this method closes the TokenStream, even on exception, which is awkward * because really the caller who called {@link Analyzer#tokenStream} should close it, * but when trying that there are recursion issues when we try to use the same @@ -147,7 +156,7 @@ public final class SuggestUtils { } return numTokens; } - + public static SuggestMode resolveSuggestMode(String suggestMode) { suggestMode = suggestMode.toLowerCase(Locale.US); if 
("missing".equals(suggestMode)) { @@ -178,7 +187,7 @@ public final class SuggestUtils { return new LuceneLevenshteinDistance(); } else if ("levenstein".equals(distanceVal)) { return new LevensteinDistance(); - //TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein + //TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein } else if ("jarowinkler".equals(distanceVal)) { return new JaroWinklerDistance(); } else if ("ngram".equals(distanceVal)) { @@ -187,7 +196,7 @@ public final class SuggestUtils { throw new IllegalArgumentException("Illegal distance option " + distanceVal); } } - + public static class Fields { public static final ParseField STRING_DISTANCE = new ParseField("string_distance"); public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode"); @@ -201,8 +210,8 @@ public final class SuggestUtils { public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len"); public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq"); public static final ParseField SHARD_SIZE = new ParseField("shard_size"); - } - + } + public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName, DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { if ("accuracy".equals(fieldName)) { @@ -233,10 +242,10 @@ public final class SuggestUtils { } return true; } - + public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName, SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { - + if ("analyzer".equals(fieldName)) { String analyzerName = parser.text(); Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); @@ -254,10 +263,10 @@ public final class SuggestUtils { return false; } return true; - + } - - + + public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) { // Verify options and set defaults if (suggestion.getField() == null) { @@ -276,8 +285,8 @@ public final class SuggestUtils { suggestion.setShardSize(Math.max(suggestion.getSize(), 5)); } } - - + + public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) { if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer)analyzer).analyzer(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index 9eba50f478a..af54e5dfd86 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -26,7 +26,11 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester; import org.elasticsearch.search.suggest.term.TermSuggester; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; /** * diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java index 08c0302f81e..e5e1b1b9199 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion; import com.carrotsearch.hppc.ObjectLongHashMap; - import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 928a1342ec1..99842ca77bb 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -39,7 +39,10 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Parses query options for {@link CompletionSuggester} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 6a0155ffb7a..75211e85205 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -28,7 +28,7 @@ import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.TopSuggestDocs; import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector; -import org.apache.lucene.util.*; +import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.fielddata.AtomicFieldData; @@ -40,7 +40,13 @@ import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.Suggester; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; public class CompletionSuggester extends Suggester { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 66c21c58162..ed3ebead7f5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -21,14 +21,19 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.search.suggest.Lookup; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import 
java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Suggestion response for {@link CompletionSuggester} results diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index fe80f70e260..100e701c03c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -30,15 +30,19 @@ import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import java.io.IOException; -import java.util.*; - -import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality * for users as they type search terms. The implementation of the completion service uses FSTs that - * are created at index-time and so must be defined in the mapping with the type "completion" before - * indexing. + * are created at index-time and so must be defined in the mapping with the type "completion" before + * indexing. */ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder { private FuzzyOptionsBuilder fuzzyOptionsBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index 8ffd497eb3a..535151b476e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -28,7 +28,11 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import java.util.*; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java index 23c9ca730b9..dffbb1aa80d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java @@ -29,7 +29,13 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; /** * A {@link ContextMapping} that uses a simple string as a criteria diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java index ee2655ebdda..c4931265776 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java @@ -21,7 +21,10 @@ package org.elasticsearch.search.suggest.completion.context; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index b15577d6fb2..42e5cc0a157 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -28,7 +28,9 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.List; +import java.util.Objects; +import java.util.Set; /** * A {@link ContextMapping} defines criteria that can be used to diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 87b702c2ffb..9d4bed4f664 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -32,9 +32,19 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; -import static org.elasticsearch.search.suggest.completion.context.ContextMapping.*; +import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_NAME; +import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_TYPE; +import static org.elasticsearch.search.suggest.completion.context.ContextMapping.QueryContext; +import static org.elasticsearch.search.suggest.completion.context.ContextMapping.Type; /** * ContextMappings indexes context-enabled suggestion fields diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 57283c1bd05..f2f3d10215d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -36,7 +36,13 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import 
org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; /** * A {@link ContextMapping} that uses a geo location/area as a @@ -295,14 +301,14 @@ public class GeoContextMapping extends ContextMapping { private int precision = DEFAULT_PRECISION; private String fieldName = null; - + protected Builder(String name) { super(name); } /** * Set the precision use o make suggestions - * + * * @param precision * precision as distance with {@link DistanceUnit}. Default: * meters @@ -314,7 +320,7 @@ public class GeoContextMapping extends ContextMapping { /** * Set the precision use o make suggestions - * + * * @param precision * precision value * @param unit @@ -327,23 +333,23 @@ public class GeoContextMapping extends ContextMapping { /** * Set the precision use o make suggestions - * + * * @param meters * precision as distance in meters * @return this */ public Builder precision(double meters) { int level = GeoUtils.geoHashLevelsForPrecision(meters); - // Ceiling precision: we might return more results + // Ceiling precision: we might return more results if (GeoUtils.geoHashCellSize(level) < meters) { - level = Math.max(1, level - 1); + level = Math.max(1, level - 1); } return precision(level); } /** * Set the precision use o make suggestions - * + * * @param level * maximum length of geohashes * @return this diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java index 75cab1e8e89..da9191bf2d5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java @@ -32,7 +32,10 @@ import java.io.IOException; import java.util.Collections; import java.util.List; -import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.*; +import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_BOOST; +import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_NEIGHBOURS; +import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_PRECISION; +import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_VALUE; /** * Defines the query context for {@link GeoContextMapping} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java index 56c6181d612..f98822296b0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.search.suggest.phrase; -import java.io.IOException; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; +import java.io.IOException; + //TODO public for tests public abstract class CandidateGenerator { @@ -35,7 +35,7 @@ public abstract class CandidateGenerator { CandidateSet set = new 
CandidateSet(Candidate.EMPTY, createCandidate(term, true)); return drawCandidates(set); } - + public Candidate createCandidate(BytesRef term, boolean userInput) throws IOException { return createCandidate(term, frequency(term), 1.0, userInput); } @@ -47,4 +47,4 @@ public abstract class CandidateGenerator { public abstract CandidateSet drawCandidates(CandidateSet set) throws IOException; -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java index b027497479c..b3dad43ad34 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java @@ -17,13 +17,13 @@ * under the License. */ package org.elasticsearch.search.suggest.phrase; -import java.io.IOException; -import java.util.Arrays; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; +import java.io.IOException; + final class CandidateScorer { private final WordScorer scorer; private final int maxNumCorrections; @@ -34,8 +34,8 @@ final class CandidateScorer { this.maxNumCorrections = maxNumCorrections; this.gramSize = gramSize; } - - + + public Correction[] findBestCandiates(CandidateSet[] sets, float errorFraction, double cutoffScore) throws IOException { if (sets.length == 0) { return Correction.EMPTY; @@ -107,7 +107,7 @@ final class CandidateScorer { } } } - + public double score(Candidate[] path, CandidateSet[] candidates) throws IOException { double score = 0.0d; for (int i = 0; i < candidates.length; i++) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 8af181f0e63..5b937500d6b 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -19,7 +19,11 @@ package org.elasticsearch.search.suggest.phrase; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.SuggestMode; import org.apache.lucene.search.spell.SuggestWord; @@ -29,7 +33,12 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.search.suggest.SuggestUtils; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; //TODO public for tests public final class DirectCandidateGenerator extends CandidateGenerator { @@ -49,7 +58,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { private final CharsRefBuilder spare = new CharsRefBuilder(); private final BytesRefBuilder byteSpare = new BytesRefBuilder(); private final int numCandidates; - + public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, 
double nonErrorLikelihood, int numCandidates) throws IOException { this(spellchecker, field, suggestMode, reader, nonErrorLikelihood, numCandidates, null, null, MultiFields.getTerms(reader, field)); } @@ -95,15 +104,15 @@ public final class DirectCandidateGenerator extends CandidateGenerator { public long internalFrequency(BytesRef term) throws IOException { if (termsEnum.seekExact(term)) { - return useTotalTermFrequency ? termsEnum.totalTermFreq() : termsEnum.docFreq(); + return useTotalTermFrequency ? termsEnum.totalTermFreq() : termsEnum.docFreq(); } return 0; } - + public String getField() { return field; } - + /* (non-Javadoc) * @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#drawCandidates(org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet, int) */ @@ -123,14 +132,14 @@ public final class DirectCandidateGenerator extends CandidateGenerator { set.addCandidates(candidates); return set; } - + protected BytesRef preFilter(final BytesRef term, final CharsRefBuilder spare, final BytesRefBuilder byteSpare) throws IOException { if (preFilter == null) { return term; } final BytesRefBuilder result = byteSpare; SuggestUtils.analyze(preFilter, term, field, new SuggestUtils.TokenConsumer() { - + @Override public void nextToken() throws IOException { this.fillBytesRef(result); @@ -138,7 +147,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { }, spare); return result.get(); } - + protected void postFilter(final Candidate candidate, final CharsRefBuilder spare, BytesRefBuilder byteSpare, final List candidates) throws IOException { if (postFilter == null) { candidates.add(candidate); @@ -148,11 +157,11 @@ public final class DirectCandidateGenerator extends CandidateGenerator { @Override public void nextToken() throws IOException { this.fillBytesRef(result); - + if (posIncAttr.getPositionIncrement() > 0 && result.get().bytesEquals(candidate.term)) { BytesRef term = result.toBytesRef(); // We should not use frequency(term) here because it will analyze the term again - // If preFilter and postFilter are the same analyzer it would fail. + // If preFilter and postFilter are the same analyzer it would fail. 
long freq = internalFrequency(term); candidates.add(new Candidate(result.toBytesRef(), freq, candidate.stringDistance, score(candidate.frequency, candidate.stringDistance, dictSize), false)); } else { @@ -162,19 +171,19 @@ public final class DirectCandidateGenerator extends CandidateGenerator { }, spare); } } - + private double score(long frequency, double errorScore, long dictionarySize) { return errorScore * (((double)frequency + 1) / ((double)dictionarySize +1)); } - + protected long thresholdFrequency(long termFrequency, long dictionarySize) { if (termFrequency > 0) { return (long) Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1)); } return 0; - + } - + public static class CandidateSet { public Candidate[] candidates; public final Candidate originalTerm; @@ -183,7 +192,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { this.candidates = candidates; this.originalTerm = originalTerm; } - + public void addCandidates(List candidates) { // Merge new candidates into existing ones, // deduping: @@ -223,7 +232,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { @Override public String toString() { - return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", score=" + score + ", frequency=" + frequency + + return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", score=" + score + ", frequency=" + frequency + (userInput ? ", userInput" : "" ) + "]"; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java index 0d6f8932983..4936973b237 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java @@ -18,17 +18,17 @@ */ package org.elasticsearch.search.suggest.phrase; -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; //TODO public for tests public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { - + private final CandidateGenerator[] candidateGenerator; private int numCandidates ; @@ -53,7 +53,7 @@ public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { } return reduce(set, numCandidates); } - + private final CandidateSet reduce(CandidateSet set, int numCandidates) { if (set.candidates.length > numCandidates) { Candidate[] candidates = set.candidates; @@ -61,14 +61,14 @@ public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { @Override public int compare(Candidate left, Candidate right) { - return Double.compare(right.score, left.score); + return Double.compare(right.score, left.score); } }); Candidate[] newSet = new Candidate[numCandidates]; System.arraycopy(candidates, 0, newSet, 0, numCandidates); set.candidates = newSet; } - + return set; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java 
b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 8d2a6fdd123..04d06abbfd0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.search.suggest.phrase; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.script.CompiledScript; @@ -30,6 +25,11 @@ import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + class PhraseSuggestionContext extends SuggestionContext { private final BytesRef SEPARATOR = new BytesRef(" "); private float maxErrors = 0.5f; @@ -81,27 +81,27 @@ class PhraseSuggestionContext extends SuggestionContext { public void addGenerator(DirectCandidateGenerator generator) { this.generators.add(generator); } - + public List generators() { return this.generators ; } - + public void setGramSize(int gramSize) { this.gramSize = gramSize; } - + public int gramSize() { return gramSize; } - + public float confidence() { return confidence; } - + public void setConfidence(float confidence) { this.confidence = confidence; } - + public void setModel(WordScorer.WordScorerFactory scorer) { this.scorer = scorer; } @@ -134,7 +134,7 @@ class PhraseSuggestionContext extends SuggestionContext { } this.size = size; } - + public Analyzer preFilter() { return preFilter; } @@ -150,22 +150,22 @@ class PhraseSuggestionContext extends SuggestionContext { public void postFilter(Analyzer postFilter) { this.postFilter = postFilter; } - - + + } public void setRequireUnigram(boolean requireUnigram) { this.requireUnigram = requireUnigram; } - + public boolean getRequireUnigram() { return requireUnigram; } - + public void setTokenLimit(int tokenLimit) { this.tokenLimit = tokenLimit; } - + public int getTokenLimit() { return tokenLimit; } diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 14b2680d25c..e167a03f8fc 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -25,15 +25,30 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.RestoreInProgress.ShardRestoreStatus; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.*; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import 
org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; +import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -43,6 +58,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -52,16 +68,35 @@ import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArrayList; import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_INDEX_UUID; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_UPGRADED; import static org.elasticsearch.common.util.set.Sets.newHashSet; /** diff --git a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java 
index 1206ef53501..42eb255e8dd 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -22,7 +22,11 @@ package org.elasticsearch.snapshots; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java index 3c8cdc4d067..58faecda4a4 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java @@ -19,7 +19,6 @@ package org.elasticsearch.snapshots; -import org.elasticsearch.ElasticsearchWrapperException; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 3033a0ff801..7b9506190f2 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.snapshots; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - import org.elasticsearch.Version; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.io.stream.StreamInput; @@ -35,6 +30,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.RestStatus; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + /** * Information about snapshot */ @@ -327,4 +327,4 @@ public class SnapshotInfo implements ToXContent, Streamable { return in.readOptionalStreamable(SnapshotInfo::new); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 91cf2af4ec5..340a7f6ce83 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -20,7 +20,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.IndexCommit; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterChangedEvent; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index bf3af7394dd..b1d16a04ead 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -21,7 +21,6 @@ package org.elasticsearch.snapshots; import 
com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.ShardSearchFailure; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 56e02926ed6..5d0c814a285 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -40,8 +40,23 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.unmodifiableMap; diff --git a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 5fea9489291..79bf97b908d 100644 --- a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -20,8 +20,6 @@ package org.elasticsearch.transport; -import java.lang.reflect.Constructor; -import java.util.concurrent.Callable; import java.util.function.Supplier; /** diff --git a/core/src/main/java/org/elasticsearch/transport/TransportFuture.java b/core/src/main/java/org/elasticsearch/transport/TransportFuture.java index c4bfcb7afea..5d34d0c0338 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportFuture.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportFuture.java @@ -19,8 +19,6 @@ package org.elasticsearch.transport; -import org.elasticsearch.ElasticsearchException; - import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 444f52b9c03..709323cb305 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -42,7 +42,11 @@ import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java 
b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java index b15add5445e..e6dfa97b613 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java @@ -21,7 +21,11 @@ package org.elasticsearch.transport.local; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.support.TransportStatus; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 3bf4fa6701d..99ce5faa086 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -31,10 +31,24 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.RequestHandlerRegistry; +import org.elasticsearch.transport.ResponseHandlerFailureTransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportSerializationException; +import org.elasticsearch.transport.TransportServiceAdapter; +import org.elasticsearch.transport.Transports; import org.elasticsearch.transport.support.TransportStatus; import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; +import org.jboss.netty.channel.WriteCompletionEvent; import java.io.IOException; import java.net.InetSocketAddress; diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java index fe3a941f665..edfe9f39b48 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java @@ -28,14 +28,17 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.netty.ReleaseChannelFutureListener; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import 
org.elasticsearch.transport.TransportResponseOptions;
+import org.elasticsearch.transport.TransportServiceAdapter;
 import org.elasticsearch.transport.support.TransportStatus;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelFuture;
 
 import java.io.IOException;
-import java.io.NotSerializableException;
 
 /**
  *
diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
index a287ec119e7..0f29ed5a2f7 100644
--- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
+++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
@@ -23,8 +23,19 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DisjunctionMaxQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
@@ -33,7 +44,11 @@ import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java
index 91be1f339dd..a547e18de52 100644
--- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java
+++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java
@@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexFormatTooNewException;
 import org.apache.lucene.index.IndexFormatTooOldException;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.util.Constants;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.common.ParsingException;
diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index 46cdea3dadf..6650f596755 100644
--- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch;
 
 import com.fasterxml.jackson.core.JsonLocation;
 import com.fasterxml.jackson.core.JsonParseException;
-import org.apache.lucene.util.Constants;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.RoutingMissingException;
 import org.elasticsearch.action.TimestampParsingException;
@@ -30,7 +29,12 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.metadata.SnapshotId;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IllegalShardRoutingStateException;
+import org.elasticsearch.cluster.routing.RoutingTableValidation;
+import org.elasticsearch.cluster.routing.RoutingValidationException;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.io.PathUtils;
diff --git a/core/src/test/java/org/elasticsearch/NamingConventionTests.java b/core/src/test/java/org/elasticsearch/NamingConventionTests.java
index 912f8922b07..41d67b88390 100644
--- a/core/src/test/java/org/elasticsearch/NamingConventionTests.java
+++ b/core/src/test/java/org/elasticsearch/NamingConventionTests.java
@@ -19,7 +19,6 @@ package org.elasticsearch;
 
 import junit.framework.TestCase;
-
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.test.ESIntegTestCase;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
index a4d089c0f82..e66036cd0d1 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
@@ -33,7 +33,9 @@ import org.hamcrest.Matchers;
 
 import java.io.IOException;
 
 import static org.hamcrest.CoreMatchers.allOf;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 public class ClusterHealthResponsesTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
index 55b0ba86aca..a64f67a8cd5 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
@@ -21,8 +21,8 @@ package org.elasticsearch.action.admin.cluster.stats;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
index dfc6ea67c49..8439e98f0ee 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.flush;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectIntMap;
-import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse;
 import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse.ShardCounts;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
index ebd32ccb482..3a81f0ba0d9 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
@@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.shards;
 
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 import org.apache.lucene.index.CorruptIndexException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.client.Requests;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
index 6d1159c82a5..70fd11e5de8 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
@@ -27,12 +27,20 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenIntMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.transport.DummyTransportAddress;
-import org.elasticsearch.common.xcontent.*;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.transport.NodeDisconnectedException;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static org.hamcrest.Matchers.equalTo;
 
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
index 8539c6cb8ec..4d7e9aa216f 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
@@ -27,10 +27,12 @@ import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.notNullValue;
 
 public class IndicesStatsTests extends ESSingleNodeTestCase {
-    
+
     public void testSegmentStatsEmptyIndex() {
         createIndex("test");
         IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get();
@@ -41,7 +43,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
         assertEquals(0, stats.getNormsMemoryInBytes());
         assertEquals(0, stats.getDocValuesMemoryInBytes());
     }
-    
+
     public void testSegmentStats() throws Exception {
         XContentBuilder mapping = XContentFactory.jsonBuilder()
             .startObject()
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
index c642bdb1e79..09079be6ee9 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
@@ -30,7 +30,12 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.indices.InvalidIndexTemplateException;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
 
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
index a38a46d0deb..c27f4c85a96 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
@@ -42,7 +42,6 @@ import org.junit.BeforeClass;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import java.util.function.Predicate;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
@@ -73,7 +72,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
         for (int i = 0; i < numIndexes; ++i) {
             final String indexName = "test" + i;
             indexNames[i] = indexName;
-            
+
             Settings settings = Settings.builder()
                 .put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern())
                 // don't allow any merges so that we can check segments are upgraded
@@ -101,7 +100,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
             } else {
                 assertEquals(0, flush(indexName).getFailedShards());
             }
-            
+
             // index more docs that won't be flushed
             numDocs = scaledRandomIntBetween(100, 1000);
             docs = new ArrayList<>();
@@ -128,14 +127,14 @@ public class UpgradeIT extends ESBackcompatTestCase {
         ensureGreen();
         logger.info("--> Nodes upgrade complete");
         logSegmentsState();
-        
+
         assertNotUpgraded(client());
         final String indexToUpgrade = "test" + randomInt(numIndexes - 1);
 
         // This test fires up another node running an older version of ES, but because wire protocol changes across major ES versions, it
         // means we can never generate ancient segments in this test (unless Lucene major version bumps but ES major version does not):
         assertFalse(hasAncientSegments(client(), indexToUpgrade));
-        
+
         logger.info("--> Running upgrade on index " + indexToUpgrade);
         assertNoFailures(client().admin().indices().prepareUpgrade(indexToUpgrade).get());
         awaitBusy(() -> {
@@ -204,7 +203,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
             assertEquals("index " + status.getIndex() + " should be upgraded",
                 0, status.getToUpgradeBytes());
         }
-        
+
         // double check using the segments api that all segments are actually upgraded
         IndicesSegmentResponse segsRsp;
         if (index == null) {
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
index 237f3a2e821..04b58e6b9fc 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.action.bulk;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.elasticsearch.action.get.MultiGetItemResponse;
 import org.elasticsearch.action.get.MultiGetRequestBuilder;
 import org.elasticsearch.action.get.MultiGetResponse;
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
index 78f96bab7b2..81eb832be9a 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
@@ -37,7 +37,12 @@ import java.util.List;
 import java.util.Map;
 
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
 
 public class BulkRequestTests extends ESTestCase {
     public void testSimpleBulk1() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
index 4d73f932d2f..ebb3b5211f1 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
@@ -33,7 +33,9 @@ import org.junit.Before;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class RetryTests extends ESTestCase {
     // no need to wait fof a long time in tests
diff --git a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
index 937cfb7b948..4fb94a4fb53 100644
--- a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
@@ -23,7 +23,10 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.StreamsUtils;
 
-import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
 import static org.hamcrest.Matchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
index 1d3a9e18757..ad246ebc530 100644
--- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
@@ -27,7 +27,11 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
 
 /**
  */
diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
index b0c13f851a6..f20e54050c6 100644
--- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
+++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
@@ -21,7 +21,6 @@ package org.elasticsearch.action.support.master;
 
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.discovery.zen.elect.ElectMasterService;
 import org.elasticsearch.discovery.zen.fd.FaultDetection;
diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
index b66196ae7d5..104c94db0c2 100644
--- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
@@ -39,7 +39,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.MasterNotDiscoveredException;
 import org.elasticsearch.rest.RestStatus;
@@ -57,7 +56,6 @@ import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -346,4 +344,4 @@ public class TransportMasterNodeActionTests extends ESTestCase {
         assertTrue(listener.isDone());
         assertThat(listener.get(), equalTo(response));
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
index 4d17155f611..d1abe8653f0 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
@@ -20,8 +20,8 @@ package org.elasticsearch.action.support.replication;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ReplicationResponse;
 import org.elasticsearch.action.NoShardAvailableActionException;
+import org.elasticsearch.action.ReplicationResponse;
 import org.elasticsearch.action.UnavailableShardsException;
 import org.elasticsearch.action.admin.indices.flush.FlushRequest;
 import org.elasticsearch.action.admin.indices.flush.FlushResponse;
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
index 406e476b4e0..913d52d5b17 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
@@ -27,7 +27,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.index.shard.ShardId;
@@ -35,7 +40,10 @@ import org.elasticsearch.index.shard.ShardId;
 
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
 import static org.elasticsearch.test.ESTestCase.randomFrom;
 import static org.elasticsearch.test.ESTestCase.randomIntBetween;
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
index 5834b2662ad..fc419128b7c 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
@@ -55,7 +55,10 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.cluster.TestClusterService;
 import org.elasticsearch.test.transport.CapturingTransport;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.*;
+import org.elasticsearch.transport.TransportChannel;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseOptions;
+import org.elasticsearch.transport.TransportService;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
index ba6e6b6532d..d1217ea6f31 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
@@ -28,8 +28,19 @@ import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.apache.lucene.analysis.util.CharArraySet;
-import org.apache.lucene.document.*;
-import org.apache.lucene.index.*;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
@@ -43,7 +54,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.test.ESIntegTestCase;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
index 0eb7c0757e7..60fa0e9d684 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
@@ -24,7 +24,6 @@ import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.io.BytesStream;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
index 5507686e355..0c542698b5f 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.action.termvectors;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
-
 import org.apache.lucene.analysis.payloads.PayloadHelper;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.DirectoryReader;
diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
index 6cf7a3384ab..51053f63a01 100644
--- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
@@ -34,7 +34,11 @@ import org.elasticsearch.test.ESTestCase;
 import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class UpdateRequestTests extends ESTestCase {
     public void testUpdateRequest() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
index 07d59b820aa..fe025060a9d 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.bwcompat;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.Version;
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
index 95735b8648f..43633fe6f27 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
@@ -19,17 +19,14 @@
 package org.elasticsearch.bwcompat;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.util.English;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.delete.DeleteResponse;
 import org.elasticsearch.action.explain.ExplainResponse;
 import org.elasticsearch.action.get.GetResponse;
@@ -40,11 +37,13 @@ import org.elasticsearch.action.get.MultiGetResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.termvectors.TermVectorsResponse;
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
index bf71c0c2467..b0b5e9fd517 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
@@ -60,9 +60,19 @@ import org.junit.Before;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.file.*;
+import java.nio.file.DirectoryStream;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
 import java.nio.file.attribute.BasicFileAttributes;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java
index 9f16ade87e8..d8f93a23140 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java
@@ -20,7 +20,6 @@ package org.elasticsearch.client.transport;
 
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -28,7 +27,14 @@ import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.component.LifecycleListener;
 import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
-import org.elasticsearch.transport.*;
+import org.elasticsearch.transport.ConnectTransportException;
+import org.elasticsearch.transport.Transport;
+import org.elasticsearch.transport.TransportException;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
+import org.elasticsearch.transport.TransportServiceAdapter;
 
 import java.io.IOException;
 import java.util.Collections;
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
index 8aa065548df..31487614c99 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionModule;
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
index 48b2591559e..0e9f6cd9e04 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
@@ -34,7 +34,11 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci
 import org.elasticsearch.cluster.settings.DynamicSettings;
 import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.inject.ModuleTestCase;
-import org.elasticsearch.common.settings.*;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.index.settings.IndexDynamicSettings;
 
 public class ClusterModuleTests extends ModuleTestCase {
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
index 8d4540aad3b..faa0f15d8f2 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
@@ -23,10 +23,21 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.block.ClusterBlocks;
-import org.elasticsearch.cluster.metadata.*;
+import org.elasticsearch.cluster.metadata.AliasMetaData;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
+import org.elasticsearch.cluster.metadata.SnapshotId;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
index 2f1e5d33f7e..2d726d97424 100644
--- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
@@ -39,7 +39,11 @@ import org.elasticsearch.test.disruption.NetworkDelaysPartition;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
 
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -47,8 +51,14 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Predicate;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.isOneOf;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
 
 @ClusterScope(scope = Scope.TEST, numDataNodes = 0)
 @ESIntegTestCase.SuppressLocalMode
diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
index 7d3825a14b8..13a5cae6ca3 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.ack;
 
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
index 726590104f1..1e9c25ed78c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.allocation;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
-
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
index b85c17097f2..4298b27fa3d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
@@ -21,9 +21,9 @@ package org.elasticsearch.cluster.allocation;
 
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
@@ -348,4 +348,4 @@ public class ClusterRerouteIT extends ESIntegTestCase {
             setClusterReadOnly(false);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
index 1faa82ad1b8..89a7f8ad65c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
@@ -29,7 +29,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import java.io.IOException;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.elasticsearch.test.ESIntegTestCase.*;
+import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.hamcrest.Matchers.instanceOf;
 
 @ClusterScope(scope= Scope.TEST, numDataNodes =0)
diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
index a4d5a6f4a75..3562fa313ba 100644
--- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
@@ -37,7 +37,10 @@ import java.io.IOException;
 
 import static org.hamcrest.CoreMatchers.allOf;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 public class ClusterStateHealthTests extends ESTestCase {
     private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY);
@@ -116,4 +119,4 @@ public class ClusterStateHealthTests extends ESTestCase {
         assertThat(clusterStateHealth.getValidationFailures(), empty());
         assertThat(clusterStateHealth.getActiveShardsPercent(), is(allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0))));
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
index 730763372ca..5f48c5abde9 100644
--- a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
+++ b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
@@ -21,7 +21,11 @@ package org.elasticsearch.cluster.health;
 import com.carrotsearch.randomizedtesting.RandomizedContext;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.index.shard.ShardId;
 
 class RoutingTableGenerator {
diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java
index 9a91e1cd562..0315f1568af 100644
--- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java
@@ -29,7 +29,11 @@ import org.junit.BeforeClass;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
index 47ae3e68580..b451183826b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
@@ -19,7 +19,9 @@
 
 package org.elasticsearch.cluster.routing;
 
-import static org.elasticsearch.test.ESTestCase.*;
+import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength;
+import static org.elasticsearch.test.ESTestCase.randomFrom;
+import static org.elasticsearch.test.ESTestCase.randomInt;
 
 /**
  * Utility class the makes random modifications to ShardRouting
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
index 3288b92cb8e..20a731b0153 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
@@ -39,8 +39,13 @@ import org.elasticsearch.test.ESAllocationTestCase;
 import java.util.Collections;
 import java.util.EnumSet;
 
-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  */
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
index e622036e13b..627febdbc11 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.routing.allocation;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.EmptyClusterInfoService;
@@ -37,9 +36,9 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
 import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESAllocationTestCase;
 import org.elasticsearch.test.gateway.NoopGatewayAllocator;
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
index 15a6ea0a5f4..fa9f4065dc1 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
@@ -37,7 +37,10 @@ import org.elasticsearch.test.gateway.NoopGatewayAllocator;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.equalTo;
@@ -823,4 +826,4 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
         assertEquals(numRelocating, 1);
     }
 
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
index 7262de2b291..a8d015a0d49 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
@@ -39,10 +39,15 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
index 5377d09d4b5..e319d4156ac 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
@@ -29,7 +29,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
@@ -48,9 +55,14 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 
-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
 
 public class DiskThresholdDeciderTests extends ESAllocationTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
index 52e88ea3bc9..8551af718e2 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
@@ -20,16 +20,24 @@
 package org.elasticsearch.cluster.routing.allocation.decider;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterInfo;
+import org.elasticsearch.cluster.ClusterInfoService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.DiskUsage;
+import org.elasticsearch.cluster.EmptyClusterInfoService;
 import org.elasticsearch.cluster.MockInternalClusterInfoService.DevNullClusterInfo;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingHelper;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.common.transport.LocalTransportAddress;
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
index 1bdc39036a3..f8be6a8c4da 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.routing.allocation.decider;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -31,9 +30,9 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Rebalance;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESAllocationTestCase;
 
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
index a17017f6303..5d673e8d60c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
@@ -24,10 +24,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.cluster.ClusterInfo;
 import org.elasticsearch.cluster.ClusterInfoService;
 import org.elasticsearch.cluster.DiskUsage;
-import org.elasticsearch.cluster.InternalClusterInfoService;
 import org.elasticsearch.cluster.MockInternalClusterInfoService;
 import org.elasticsearch.cluster.routing.RoutingNode;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index fb8a8e28b33..fba6d8127b8 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cluster.settings;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.cluster.ClusterName;
@@ -34,7 +33,6 @@ import org.elasticsearch.index.store.IndexStoreConfig;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 
-import static org.elasticsearch.common.inject.matcher.Matchers.not;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
diff --git a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java
index 48b1e899eb3..a272b6627e4 100644
--- a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java
+++ b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.common.bytes;
 
-import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
@@ -36,6 +35,7 @@ import java.nio.channels.GatheringByteChannel;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.CharsetDecoder;
 import java.nio.charset.CoderResult;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Note: this is only used by one lone test method.
diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
index 38090403668..369aa8bcafd 100644
--- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
+++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
@@ -24,8 +24,18 @@ import org.junit.Before;
 
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadMXBean;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReferenceArray;
diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
index dcbbc1ed337..259ee109f0f 100644
--- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
+++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
@@ -24,7 +24,6 @@ import java.util.List;
 
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.is;
 
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java b/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java
index 90972185e0b..f574c8d9fe5 100644
--- a/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java
@@ -21,7 +21,12 @@ package org.elasticsearch.common.collect;
 
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
 
 public class IteratorsTests extends ESTestCase {
     public void testConcatentation() {
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
index 94666d8c252..d65137b21b9 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -26,7 +26,14 @@ import com.spatial4j.core.shape.Shape;
 import com.spatial4j.core.shape.ShapeCollection;
 import com.spatial4j.core.shape.jts.JtsGeometry;
 import com.spatial4j.core.shape.jts.JtsPoint;
-import com.vividsolutions.jts.geom.*;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.GeometryFactory;
+import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.LinearRing;
+import com.vividsolutions.jts.geom.MultiLineString;
+import com.vividsolutions.jts.geom.Point;
+import com.vividsolutions.jts.geom.Polygon;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -312,7 +319,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
         XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
         parser.nextToken();
         Shape shape = ShapeBuilder.parse(parser).build();
-        
+
         ElasticsearchGeoAssertions.assertPolygon(shape);
 
         // test 2: ccw poly crossing dateline
@@ -332,7 +339,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
         parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
         parser.nextToken();
         shape = ShapeBuilder.parse(parser).build();
-        
+
         ElasticsearchGeoAssertions.assertMultiPolygon(shape);
 
         // test 3: cw poly not crossing dateline
@@ -484,7 +491,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
 
     public void testParse_invalidPolygon() throws IOException {
         /**
-         * The following 3 test cases ensure proper error handling of invalid polygons 
+         * The following 3 test cases ensure proper error handling of invalid polygons
         * per the GeoJSON specification
         */
        // test case 1: create an invalid polygon with only 2 points
diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
index 6a73717fa2f..ac439ff12e0 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -28,7 +28,6 @@ import com.spatial4j.core.shape.impl.PointImpl;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.LineString;
 import com.vividsolutions.jts.geom.Polygon;
-
 import org.elasticsearch.common.geo.builders.LineStringBuilder;
 import org.elasticsearch.common.geo.builders.PolygonBuilder;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java
index 1db9da428ad..bd90fefc922 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.common.unit.DistanceUnit;
 
 import java.io.IOException;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
index 2015f2b0bc6..ec2b0ad356e 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.common.geo.builders;
 
 import com.spatial4j.core.shape.Rectangle;
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 
 import java.io.IOException;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java
index 53e30cc5a80..7c9e9a58020 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
 
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java
index 5d0ad7ed130..c2224ae6d68 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
 
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java
index fca76e2e973..fb365df0122 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
 
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java
index 1946d24581b..ba3f808d24a 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
 
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java
index ad8b3b817fe..ea83359c1f0 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.geo.builders;
 
 import com.vividsolutions.jts.geom.Coordinate;
-
 import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
 import org.elasticsearch.test.geo.RandomShapeGenerator;
 import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
diff --git a/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java b/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java
index a4f35389bd3..0b23002a890 100644
--- a/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.common.hppc;
 
 import com.carrotsearch.hppc.ObjectHashSet;
-
 import org.elasticsearch.common.collect.HppcMaps;
 import org.elasticsearch.test.ESTestCase;
diff --git a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
index 7d696b0cd81..8ba500333e1 100644
--- a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
@@ -18,7 +18,13 @@
  */
 package org.elasticsearch.common.inject;
 
-import org.elasticsearch.common.inject.spi.*;
+import org.elasticsearch.common.inject.spi.Element;
+import org.elasticsearch.common.inject.spi.Elements;
+import org.elasticsearch.common.inject.spi.InstanceBinding;
+import org.elasticsearch.common.inject.spi.LinkedKeyBinding;
+import org.elasticsearch.common.inject.spi.ProviderInstanceBinding;
+import org.elasticsearch.common.inject.spi.ProviderLookup;
+import org.elasticsearch.common.inject.spi.UntargettedBinding;
 import org.elasticsearch.test.ESTestCase;
 
 import java.lang.annotation.Annotation;
diff --git a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
index 39d24a0e792..4f2b8f6811c 100644
--- a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.common.io;
 
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.elasticsearch.test.ESTestCase;
-import org.junit.Assert;
 import org.junit.Before;
 
 import java.io.IOException;
@@ -31,8 +30,8 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
 
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
 import static org.elasticsearch.common.io.FileTestUtils.assertFileContent;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
 
 /**
  * Unit tests for {@link org.elasticsearch.common.io.FileSystemUtils}.
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index cec70fb61f5..b5f26dba8a5 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -25,7 +25,12 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; public class StreamTests extends ESTestCase { public void testRandomVLongSerialization() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java index 5511796a2ed..a4a5972e45b 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.Version; import org.elasticsearch.test.ESTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 17345fd714f..0a15693dfd5 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -24,7 +24,14 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoDeletionPolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; @@ -35,7 +42,11 @@ import org.apache.lucene.util.Version; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index ad811a38aed..51d2ba77ec5 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -24,7 +24,13 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import 
org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -36,7 +42,14 @@ import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java index ef8c55ddf90..7109b2db02c 100644 --- a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java @@ -20,13 +20,13 @@ package org.elasticsearch.common.network; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeBuilder; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.List; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; public class CidrsTests extends ESTestCase { public void testNullCidr() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 069418a7e1d..71914444725 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.settings; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java index 41ed95a519e..55bc2b8ddb9 100644 --- a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java +++ b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java @@ -28,7 +28,9 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.List; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; /** * Basic tests for the {@link BoundTransportAddress} class. 
  * Basic tests for the {@link BoundTransportAddress} class. These tests should not bind to any addresses but should
diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
index 184de7f385e..bf55a330509 100644
--- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
@@ -21,9 +21,9 @@ package org.elasticsearch.common.util;
 
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
diff --git a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
index df26f2d55b8..a26a06a09a3 100644
--- a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.common.util;
 
 import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.ObjectLongMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
-
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.TestUtil;
diff --git a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java
index 4c3612da8e0..8c192a2a350 100644
--- a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java
@@ -25,7 +25,14 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
 import static org.hamcrest.Matchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java b/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java
index 25564deb07e..b4bc99e9642 100644
--- a/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java
@@ -20,7 +20,11 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
-import org.elasticsearch.common.inject.*;
+import org.elasticsearch.common.inject.Binder;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.inject.Injector;
+import org.elasticsearch.common.inject.Module;
+import org.elasticsearch.common.inject.ModulesBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
diff --git a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
index aa21f323185..f5ae388db77 100644
--- a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.common.util;
 
 import com.carrotsearch.hppc.LongLongHashMap;
 import com.carrotsearch.hppc.LongLongMap;
 import com.carrotsearch.hppc.cursors.LongLongCursor;
-
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.util.HashMap;
diff --git a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
index 35fa7bec058..bf091828ca5 100644
--- a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.common.util;
 
 import com.carrotsearch.hppc.LongObjectHashMap;
-
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 public class LongObjectHashMapTests extends ESSingleNodeTestCase {
diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java
index 94f154d4e5d..25c765e6480 100644
--- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.common.util;
 
-import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -40,6 +39,7 @@ import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
index deac15b50d3..685e06afb16 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
@@ -27,7 +27,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.*;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.PriorityBlockingQueue;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.hamcrest.Matchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
index 7489ea8f9dc..9b1cfb64573 100644
--- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent;
 
 import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
 import com.fasterxml.jackson.dataformat.smile.SmileConstants;
-
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.test.ESTestCase;
@@ -97,17 +96,17 @@
         is = new ByteArrayInputStream(new byte[] {(byte) 1});
         assertNull(XContentFactory.xContentType(is));
     }
-
+
     public void testJsonFromBytesOptionallyPrecededByUtf8Bom() throws Exception {
         byte[] bytes = new byte[] {(byte) '{', (byte) '}'};
         assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON));
-
+
        bytes = new byte[] {(byte) 0x20, (byte) '{', (byte) '}'};
        assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON));
-
+
        bytes = new byte[] {(byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) '{', (byte) '}'};
        assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON));
-
+
        bytes = new byte[] {(byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) 0x20, (byte) '{', (byte) '}'};
        assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON));
    }
diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java
index 50683007717..80cc12b5f3b 100644
--- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java
+++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java
@@ -21,7 +21,12 @@ package org.elasticsearch.common.xcontent.support.filtering;
 
 import org.elasticsearch.test.ESTestCase;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.isEmptyString;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class FilterPathTests extends ESTestCase {
 
@@ -348,4 +353,4 @@
         assertThat(filterPath.getSegment(), isEmptyString());
         assertSame(filterPath, FilterPath.EMPTY);
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java
index 8e44c7a5442..4efedd9154a 100644
--- a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
-
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.test.ESTestCase;
diff --git a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
index 1332a4fb13a..223f8612b31 100644
--- a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
@@ -31,7 +31,10 @@ import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.CyclicBarrier;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
 
 public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
index b9d4fdec913..67d3df42b38 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
@@ -25,7 +25,11 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
+import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -55,7 +59,16 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration;
-import org.elasticsearch.test.disruption.*;
+import org.elasticsearch.test.disruption.BlockClusterStateProcessing;
+import org.elasticsearch.test.disruption.IntermittentLongGCDisruption;
+import org.elasticsearch.test.disruption.LongGCDisruption;
+import org.elasticsearch.test.disruption.NetworkDelaysPartition;
+import org.elasticsearch.test.disruption.NetworkDisconnectPartition;
+import org.elasticsearch.test.disruption.NetworkPartition;
+import org.elasticsearch.test.disruption.NetworkUnresponsivePartition;
+import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
+import org.elasticsearch.test.disruption.SingleNodeDisruption;
+import org.elasticsearch.test.disruption.SlowClusterStateProcessing;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.transport.TransportException;
@@ -65,15 +78,31 @@ import org.elasticsearch.transport.TransportService;
 import org.junit.Before;
 
 import java.io.IOException;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
 
 @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0)
 @ESIntegTestCase.SuppressLocalMode
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java
index c4955561905..537ffa3acd1 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java
@@ -26,7 +26,11 @@ import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.discovery.zen.elect.ElectMasterService;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 public class ElectMasterServiceTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
index 4a248784f91..0ca261cbf65 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
@@ -44,8 +44,17 @@ import org.elasticsearch.test.cluster.TestClusterService;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.junit.Before;
 
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
index 217e86526cc..9f9c0420c2f 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
@@ -24,7 +24,10 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
 import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -56,7 +59,10 @@ import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicReference;
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
index bc5e97ce08e..dfaf407f850 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
@@ -28,9 +28,19 @@ import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueue.ClusterStateContext;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
 
 public class PendingClusterStatesQueueTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
index 0bac1bc4c2e..0637ae7de8e 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
@@ -21,13 +21,16 @@ package org.elasticsearch.discovery.zen.publish;
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
+import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -35,6 +38,7 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoverySettings;
@@ -44,20 +48,35 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.*;
+import org.elasticsearch.transport.BytesTransportRequest;
+import org.elasticsearch.transport.TransportChannel;
+import org.elasticsearch.transport.TransportConnectionListener;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseOptions;
+import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.local.LocalTransport;
 import org.junit.After;
 import org.junit.Before;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.emptyIterable;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 @TestLogging("discovery.zen.publish:TRACE")
 public class PublishClusterStateActionTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
index 47ab7deb995..52f19d7deee 100644
--- a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
+++ b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.document;
 
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.admin.indices.flush.FlushResponse;
 import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
@@ -29,6 +28,7 @@ import org.elasticsearch.action.delete.DeleteResponse;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.test.ESIntegTestCase;
diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
index 5f3b0567e32..a661575cbec 100644
--- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
@@ -30,12 +30,16 @@ import org.elasticsearch.test.ESIntegTestCase;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  */
diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
index e25b95be578..60cf2ef5dc1 100644
--- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
@@ -33,10 +33,15 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
-import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
+import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
@@ -430,4 +435,4 @@
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
     }
 
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
index 441314b1e35..98b62dc18ba 100644
--- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
@@ -19,7 +19,12 @@ package org.elasticsearch.gateway;
 
 import org.apache.lucene.codecs.CodecUtil;
-import org.apache.lucene.store.*;
+import org.apache.lucene.store.ChecksumIndexInput;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
@@ -28,7 +33,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.xcontent.*;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -41,10 +50,20 @@ import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.stream.StreamSupport;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
 
 @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS
 public class MetaDataStateFormatTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
index 193985a1c68..44c1fae6492 100644
--- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
@@ -27,7 +27,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.metadata.SnapshotId;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.RestoreSource;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
 import org.elasticsearch.common.Nullable;
@@ -42,7 +46,8 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
diff --git a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
index 88499bf96cd..3b7e62216ce 100644
--- a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
@@ -19,11 +19,20 @@ package org.elasticsearch.gateway;
 
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 
 public class PriorityComparatorTests extends ESTestCase {
diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
index 1a95b66817a..1dfab4f62d3 100644
--- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
@@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.indices.flush.SyncedFlushUtil;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
index 0818999ea7e..0a6ddca5d24 100644
--- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
@@ -27,7 +27,15 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
index 936a6fa09a0..aca3906d185 100644
--- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
+++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
@@ -26,12 +26,10 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.indices.flush.SyncedFlushUtil;
 import org.elasticsearch.indices.recovery.RecoveryState;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.client;
-import static org.elasticsearch.test.ESIntegTestCase.internalCluster;
 import static org.elasticsearch.test.ESTestCase.randomBoolean;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
index e58df29aa3e..139e1a0647d 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
@@ -18,16 +18,28 @@
  */
 package org.elasticsearch.http.netty;
 
-import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.jboss.netty.bootstrap.ClientBootstrap;
-import org.jboss.netty.channel.*;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
-import org.jboss.netty.handler.codec.http.*;
+import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
+import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
+import org.jboss.netty.handler.codec.http.HttpClientCodec;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.jboss.netty.handler.codec.http.HttpRequest;
+import org.jboss.netty.handler.codec.http.HttpResponse;
 
 import java.io.Closeable;
 import java.net.SocketAddress;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
index c2306132930..a9f44fc046e 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
@@ -22,7 +22,11 @@ import org.apache.lucene.index.AssertingDirectoryReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.QueryCachingPolicy;
+import org.apache.lucene.search.TermStatistics;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.Version;
diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
index e214cea9cc1..9f22d44a214 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
@@ -34,7 +34,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.RoutingNodes;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShadowIndexShard;
 import org.elasticsearch.index.translog.TranslogStats;
diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
index 457cf31ec83..1f083466896 100644
--- a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
+++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
@@ -19,15 +19,14 @@
 package org.elasticsearch.index;
 
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.index.IndexAction;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.RoutingNodes;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.discovery.zen.fd.FaultDetection;
 import org.elasticsearch.plugins.Plugin;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
index 7cd16e350a4..f844d9ac7a6 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
@@ -53,7 +53,10 @@ import java.util.Collections;
 import java.util.Set;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
index cd5138b4e0c..f467aa289f8 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
@@ -36,7 +36,11 @@ import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.hamcrest.Matchers.instanceOf;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
index e685c21422b..a097d55f4a3 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
@@ -42,7 +42,9 @@ import java.util.Collections;
 import java.util.List;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasItems;
+import static org.hamcrest.Matchers.instanceOf;
 
 /**
 */
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
index d931b478f3e..6da1a7721a4 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
@@ -22,7 +22,12 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.ngram.*;
+import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
+import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
+import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter;
+import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer;
+import org.apache.lucene.analysis.ngram.Lucene43NGramTokenizer;
+import org.apache.lucene.analysis.ngram.NGramTokenizer;
 import org.apache.lucene.analysis.reverse.ReverseStringFilter;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
index 6fa2e21fbd1..ab0a24d9dd8 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
@@ -19,14 +19,14 @@ package org.elasticsearch.index.analysis;
  * under the License.
  */
 
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.regex.Pattern;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.core.StopAnalyzer;
 import org.elasticsearch.test.ESTokenStreamTestCase;
 
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.regex.Pattern;
+
 /**
  * Verifies the behavior of PatternAnalyzer.
 */
@@ -38,13 +38,13 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase {
     public void testNonWordPattern() throws IOException {
         // Split on non-letter pattern, do not lowercase, no stopwords
         PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\W+"), false, null);
-        assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.",
+        assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.",
                 new String[] { "The", "quick", "brown", "Fox", "the", "abcd1234", "56", "78", "dc" });
 
         // split on non-letter pattern, lowercase, english stopwords
-        PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true,
+        PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true,
                 StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-        assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.",
+        assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.",
                 new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" });
     }
 
@@ -55,13 +55,13 @@
     public void testWhitespacePattern() throws IOException {
         // Split on whitespace patterns, do not lowercase, no stopwords
         PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null);
-        assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.",
+        assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.",
                 new String[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." });
 
         // Split on whitespace patterns, lowercase, english stopwords
-        PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true,
+        PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true,
                 StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-        assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.",
+        assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.",
                 new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." });
     }
 
@@ -72,13 +72,13 @@
     public void testCustomPattern() throws IOException {
         // Split on comma, do not lowercase, no stopwords
         PatternAnalyzer a = new PatternAnalyzer(Pattern.compile(","), false, null);
-        assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,",
+        assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,",
                 new String[] { "Here", "Are", "some", "Comma", "separated", "words" });
 
         // split on comma, lowercase, english stopwords
         PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-        assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,",
+        assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,",
                 new String[] { "here", "some", "comma", "separated", "words" });
     }
 
@@ -102,10 +102,10 @@
         // Split on whitespace patterns, do not lowercase, no stopwords
         PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null);
-        assertAnalyzesTo(a, document.toString(),
+        assertAnalyzesTo(a, document.toString(),
                 new String[] { new String(largeWord), new String(largeWord2) });
     }
-
+
     /** blast some random strings through the analyzer */
     public void testRandomStrings() throws Exception {
         Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
index 2e2a45fab6a..186f6ac1cb7 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis;
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.StopFilter;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
index aa063a1d37e..1dbd9ac2bd9 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
@@ -26,7 +26,6 @@ import org.apache.lucene.analysis.core.StopFilter;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter;
 import org.apache.lucene.util.Version;
-import org.elasticsearch.common.inject.ProvisionException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.Settings.Builder;
 import org.elasticsearch.test.ESTokenStreamTestCase;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
index 52730dd2616..f7c346c6570 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
@@ -34,8 +34,6 @@ import java.io.InputStream;
 import java.io.StringReader;
 import java.nio.file.Files;
 import java.nio.file.Path;
-
-import static org.hamcrest.Matchers.instanceOf;
 
 public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
     public void testDefault() throws IOException {
         Settings settings = Settings.settingsBuilder()
diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
index 56bf966dd41..d2bf6bebc5c 100644
--- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
@@ -23,7 +23,13 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.BitSetProducer;
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 2a122c38dde..9a33a052198 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -28,7 +28,17 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
+import org.apache.lucene.index.LiveIndexWriterConfig;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
+import org.apache.lucene.index.LogDocMergePolicy;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.SnapshotDeletionPolicy;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
@@ -62,13 +72,25 @@ import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.codec.CodecService;
 import org.elasticsearch.index.engine.Engine.Searcher;
 import org.elasticsearch.index.indexing.ShardIndexingService;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperForType;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.MapperBuilders;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import
org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexSearcherWrapper; +import org.elasticsearch.index.shard.MergeSchedulerConfig; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardUtils; +import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.DirectoryUtils; @@ -91,7 +113,12 @@ import java.nio.charset.Charset; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -102,7 +129,12 @@ import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class InternalEngineTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 3fe7a540bf8..8e2501e9430 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -23,7 +23,13 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.LiveIndexWriterConfig; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; @@ -71,7 +77,12 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** * TODO: document me! 
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
index bed9b480c01..70e3b66553c 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
@@ -23,7 +23,13 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.RAMDirectory;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
index 741ef3804b1..87cf5e1c570 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
@@ -24,13 +24,14 @@ import org.apache.lucene.document.GeoPointField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.util.GeoUtils;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.VersionUtils;
 
 import static org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 /**
  *
@@ -89,4 +90,4 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
index b1f9d73de73..024a90ce7ba 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Store;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
index 73fdd79b108..616e7a44c12 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.hppc.ObjectArrayList;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.settings.Settings;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
index 6c4673bbd0b..1c4514c9fc9 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.SortedSetDocValuesField;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
index 59fc8952a67..b3485f3f212 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
@@ -23,7 +23,11 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
index 21780fdf1fa..943af64363e 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
@@ -18,7 +18,9 @@
  */
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.document.*;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.elasticsearch.index.fielddata.plain.AbstractAtomicGeoPointFieldData;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
index b2f818c89f0..c9ac901d44f 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
@@ -23,7 +23,12 @@ import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -39,7 +44,14 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperBuilders;
-import org.elasticsearch.index.mapper.core.*;
+import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
+import org.elasticsearch.index.mapper.core.ByteFieldMapper;
+import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
+import org.elasticsearch.index.mapper.core.FloatFieldMapper;
+import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.ShortFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
index 230330dbbf5..561252a5c71 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
index e25b5c73c69..63b66f47d1a 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
@@ -21,87 +21,91 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.test.ESTestCase;
 
 public class ReplaceMissingTests extends ESTestCase {
-    
+
     public void test() throws Exception {
         Directory dir = newDirectory();
         IndexWriterConfig iwc = newIndexWriterConfig(null);
         iwc.setMergePolicy(newLogMergePolicy());
         IndexWriter iw = new IndexWriter(dir, iwc);
-        
+
         Document doc = new Document();
         doc.add(new SortedDocValuesField("field", new BytesRef("cat")));
         iw.addDocument(doc);
-        
+
         doc = new Document();
         iw.addDocument(doc);
-        
+
         doc = new Document();
         doc.add(new SortedDocValuesField("field", new BytesRef("dog")));
         iw.addDocument(doc);
         iw.forceMerge(1);
         iw.close();
-        
+
         DirectoryReader reader = DirectoryReader.open(dir);
         LeafReader ar = getOnlySegmentReader(reader);
         SortedDocValues raw = ar.getSortedDocValues("field");
         assertEquals(2, raw.getValueCount());
-        
+
         // existing values
         SortedDocValues dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("cat"));
         assertEquals(2, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(0, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("dog"));
         assertEquals(2, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(1, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         // non-existing values
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("apple"));
         assertEquals(3, dv.getValueCount());
         assertEquals("apple", dv.lookupOrd(0).utf8ToString());
         assertEquals("cat", dv.lookupOrd(1).utf8ToString());
         assertEquals("dog", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(1, dv.getOrd(0));
         assertEquals(0, dv.getOrd(1));
         assertEquals(2, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("company"));
         assertEquals(3, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("company", dv.lookupOrd(1).utf8ToString());
         assertEquals("dog", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(1, dv.getOrd(1));
         assertEquals(2, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("ebay"));
         assertEquals(3, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
         assertEquals("ebay", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(2, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         reader.close();
         dir.close();
     }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 966ea01e95c..ff66ffc6181 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -29,16 +29,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.FloatFieldMapper;
 import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
 
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index f4a7507a0b8..3c7a57f3849 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -28,17 +28,17 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.junit.Rule;
 import org.junit.rules.ExpectedException;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.concurrent.ExecutionException;
+
 import static org.elasticsearch.test.VersionUtils.getFirstVersion;
 import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
 import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.Matchers.hasToString;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.concurrent.ExecutionException;
-
 public class MapperServiceTests extends ESSingleNodeTestCase {
     @Rule
     public ExpectedException expectedException = ExpectedException.none();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
index bbba3432b66..0b6354a45b6 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
@@ -49,12 +49,21 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.nullValue;
 
 public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
index 0bc56b0c30b..06f42b3f49c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
@@ -20,13 +20,10 @@
 package org.elasticsearch.index.mapper.binary;
 
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -83,7 +80,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
         }
         final byte[] binaryValue2 = out.bytes().toBytes();
         assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2)));
-        
+
         for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) {
             ParsedDocument doc = mapper.parse("test", "type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes());
             BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
index b81a3d6d40d..83160736865 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
@@ -20,7 +20,11 @@ package org.elasticsearch.index.mapper.completion;
 
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.suggest.document.*;
+import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
+import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
+import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
+import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
+import org.apache.lucene.search.suggest.document.SuggestField;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.automaton.Operations;
@@ -31,7 +35,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
@@ -39,7 +47,10 @@ import java.io.IOException;
 import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 
 public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
     public void testDefaultConfiguration() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BinaryFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BinaryFieldTypeTests.java
index 7ab78864fb7..be5c85edf72 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/BinaryFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BinaryFieldTypeTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core;
 
 import org.elasticsearch.index.mapper.FieldTypeTestCase;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.junit.Before;
 
 public class BinaryFieldTypeTests extends FieldTypeTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java
index abca5595537..8a1b42f862a 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.core;
 
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
index 1cb41480cb7..4c0bffcc1bd 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
@@ -19,7 +19,11 @@
 
 package org.elasticsearch.index.mapper.core;
 
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.CannedTokenStream;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
index 4772958bdb7..8387f724618 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
@@ -39,8 +39,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
@@ -53,12 +57,20 @@ import org.joda.time.DateTimeZone;
 import org.junit.Before;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class SimpleDateMappingTests extends ESSingleNodeTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
index 6f7541a272a..dc6c720402e 100755
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper.externalvalues;
 
 import com.spatial4j.core.shape.Point;
-
 import org.apache.lucene.document.Field;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
@@ -29,7 +28,6 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.builders.ShapeBuilders;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
index 8bdb5670dbb..7797762606a 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
@@ -24,7 +24,6 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -105,7 +104,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
         public ExternalMetadataMapper build(BuilderContext context) {
             return new ExternalMetadataMapper(context.indexSettings());
         }
-        
+
     }
 
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
@@ -119,7 +118,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
         public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
             return new ExternalMetadataMapper(indexSettings);
         }
-        
+
     }
 
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
index 4efa12fca00..17a16913f3e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
@@ -38,8 +38,6 @@ import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.VersionUtils;
 
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
@@ -48,7 +46,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.isIn;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
index c3a92e8846d..857dcd5d07c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
@@ -30,7 +30,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.VersionUtils;
-import org.hamcrest.MatcherAssert;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
index 40cf05c4d6a..77fc409f30d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
@@ -29,11 +29,13 @@ import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
 
 public class IndexTypeMapperTests extends ESSingleNodeTestCase {
     private Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-    
+
     public void testSimpleIndexMapperEnabledBackcompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_index").field("enabled", true).endObject()
@@ -51,7 +53,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().get("_index"), equalTo("test"));
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
     }
-    
+
    public void testExplicitDisabledIndexMapperBackcompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_index").field("enabled", false).endObject()
@@ -69,7 +71,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().get("_index"), nullValue());
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
     }
-    
+
     public void testDefaultDisabledIndexMapper() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .endObject().endObject().string();
@@ -86,7 +88,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().get("_index"), nullValue());
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
     }
-    
+
     public void testThatMergingFieldMappingAllowsDisablingBackcompat() throws Exception {
         String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_index").field("enabled", true).endObject()
@@ -103,7 +105,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
         mapperEnabled.merge(mapperDisabled.mapping(), false, false);
         assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
     }
-    
+
     public void testThatDisablingWorksWhenMergingBackcompat() throws Exception {
         String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_index").field("enabled", true).endObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
index 58fa8fd69b0..506e51f8070 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
@@ -31,9 +31,17 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext.Document;
-import org.elasticsearch.index.mapper.core.*;
+import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
+import org.elasticsearch.index.mapper.core.DateFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
 import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.VersionUtils;
@@ -45,10 +53,14 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.mapper.MapperBuilders.*;
+import static org.elasticsearch.index.mapper.MapperBuilders.doc;
+import static org.elasticsearch.index.mapper.MapperBuilders.rootObject;
+import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
 
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
index 3719500669c..126c223f49d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
@@ -29,8 +29,6 @@ import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
-import static org.hamcrest.Matchers.nullValue;
-
 public class ParentMappingTests extends ESSingleNodeTestCase {
 
     public void testParentSetInDocNotAllowed() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
index 7d0afdb0724..ff0971034f8 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
@@ -39,7 +39,10 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 
 public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
index c30ea9bc6c6..5a6521ee3c9 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
@@ -24,12 +24,15 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.VersionUtils;
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
index d545452db0f..ce4e2cada7c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
@@ -65,7 +65,6 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.isIn;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 
diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
index 2d250ff0b95..8faa2dac524 100644
--- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.io.JsonStringEncoder;
-
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -61,7 +60,11 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.unit.Fuzziness;
-import org.elasticsearch.common.xcontent.*;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.EnvironmentModule;
 import org.elasticsearch.index.Index;
@@ -83,8 +86,13 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
-import org.elasticsearch.script.*;
+import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.Script.ScriptParseException;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptContextRegistry;
+import org.elasticsearch.script.ScriptEngineService;
+import org.elasticsearch.script.ScriptModule;
+import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
@@ -104,10 +112,20 @@ import java.io.IOException;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ExecutionException;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;
 
 public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase {
 
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
index 5d94a9a8bca..b1a4f7ccde1 100644
--- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
@@ -19,18 +19,27 @@
 package org.elasticsearch.index.query;
 
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
-
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
-import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
 
diff --git a/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
index 8a6626be3d4..f58d0f926a1 100644
--- a/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
@@ -26,10 +26,16 @@ import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBuilder> {
     /**
@@ -145,26 +151,26 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase
diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java
index 6a952cebc0c..78f7de9d9d5 100644
--- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.GeoPointDistanceRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.util.SloppyMath;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -298,19 +297,19 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase
     public void testFromJson() throws IOException {
         String json =
-                "{\n" + 
-                "  \"geohash_cell\" : {\n" + 
-                "    \"neighbors\" : true,\n" + 
-                "    \"precision\" : 3,\n" + 
-                "    \"pin\" : \"t4mk70fgk067\",\n" + 
-                "    \"boost\" : 1.0\n" + 
-                "  }\n" + 
+                "{\n" +
+                "  \"geohash_cell\" : {\n" +
+                "    \"neighbors\" : true,\n" +
+                "    \"precision\" : 3,\n" +
+                "    \"pin\" : \"t4mk70fgk067\",\n" +
+                "    \"boost\" : 1.0\n" +
+                "  }\n" +
                 "}";
 
         GeohashCellQuery.Builder parsed = (GeohashCellQuery.Builder) parseQuery(json);
         checkGeneratedJson(json, parsed);
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
index 51da0fc3996..f2b3a1a5026 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
@@ -21,19 +21,18 @@ package org.elasticsearch.index.query;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.fasterxml.jackson.core.JsonParseException;
-
 import org.apache.lucene.queries.TermsQuery;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Uid;
@@ -50,10 +49,9 @@ import org.elasticsearch.test.TestSearchContext;
 import java.io.IOException;
 import java.util.Collections;
 
-import static org.hamcrest.Matchers.containsString;
-
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.Matchers.containsString;
 
 public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQueryBuilder> {
     protected static final String PARENT_TYPE = "parent";
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
index b391930c32c..5dc1d66cd5f 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.fasterxml.jackson.core.JsonParseException;
-
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.ElasticsearchParseException;
@@ -45,10 +44,9 @@ import org.elasticsearch.test.TestSearchContext;
 import java.io.IOException;
 import java.util.Arrays;
 
-import static org.hamcrest.Matchers.containsString;
-
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.Matchers.containsString;
 
 public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> {
     protected static final String PARENT_TYPE = "parent";
@@ -245,20 +243,20 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase
 {
     private List randomTerms;
@@ -260,11 +260,11 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase
     fileStores = new ArrayList<>();
@@ -98,7 +102,7 @@ public class NewPathForShardTests extends ESTestCase {
         public MockFileStore(String desc) {
             this.desc = desc;
         }
-        
+
         @Override
         public String type() {
             return "mock";
@@ -204,7 +208,7 @@ public class NewPathForShardTests extends ESTestCase {
         // had the most free space, never using the other drive unless new shards arrive
         // after the first shards started using storage:
         assertNotEquals(result1.getDataPath(), result2.getDataPath());
-        
+
         nodeEnv.close();
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
index 3422c66a3e7..a59dcb49aca 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
@@ -21,7 +21,10 @@ package org.elasticsearch.index.shard;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.CompositeReaderContext;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
index 38fc17c777e..4bd8ba9cb3e 100644
--- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
+++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.index.snapshots.blobstore;
 
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
-
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.ByteArrayInputStream;
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
index 5a1aa2ef469..bee540ba9ea 100644
--- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
@@ -33,7 +33,11 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.common.Nullable;
@@ -44,7 +48,11 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.gateway.PrimaryShardAllocator;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexEventListener;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexShardState;
+import org.elasticsearch.index.shard.MergePolicyConfig;
+import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.indices.recovery.RecoveryTarget;
@@ -69,7 +77,14 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
@@ -78,8 +93,16 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
 
 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
 public class CorruptedFileIT extends ESIntegTestCase {
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
index 653a7d04e9e..6194183c983 100644
--- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.store;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
diff --git a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
index f5b7fc250aa..234de11b516 100644
--- a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
@@ -27,7 +27,6 @@ import org.elasticsearch.action.bulk.TransportShardBulkAction;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
index ee3ad6b8b29..f27d9ddcd2b 100644
--- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
@@ -19,7 +19,12 @@
 package org.elasticsearch.index.store;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import org.apache.lucene.store.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FileSwitchDirectory;
+import org.apache.lucene.store.MMapDirectory;
+import org.apache.lucene.store.NIOFSDirectory;
+import org.apache.lucene.store.NoLockFactory;
+import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
diff --git a/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java b/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java
index 3d9c4f732bf..c65a02ed692 100644
--- a/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java
@@ -19,50 +19,50 @@
 package org.elasticsearch.index.store;
 
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.store.Directory;
+import
org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexOutput; +import org.elasticsearch.test.ESTestCase; + import java.nio.charset.StandardCharsets; import java.util.zip.Adler32; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.Directory; -import org.elasticsearch.test.ESTestCase; - -/** +/** * Simple tests for LegacyVerification (old segments) - * @deprecated remove this test when support for lucene 4.x - * segments is not longer needed. + * @deprecated remove this test when support for lucene 4.x + * segments is not longer needed. */ @Deprecated public class LegacyVerificationTests extends ESTestCase { - + public void testAdler32() throws Exception { Adler32 expected = new Adler32(); byte bytes[] = "abcdefgh".getBytes(StandardCharsets.UTF_8); expected.update(bytes); String expectedString = Store.digestToString(expected.getValue()); - + Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("legacy", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.Adler32VerifyingIndexOutput(o, expectedString, 8); out.writeBytes(bytes, 0, bytes.length); out.verify(); out.close(); out.verify(); - + dir.close(); } - + public void testAdler32Corrupt() throws Exception { Adler32 expected = new Adler32(); byte bytes[] = "abcdefgh".getBytes(StandardCharsets.UTF_8); expected.update(bytes); String expectedString = Store.digestToString(expected.getValue()); - + byte corruptBytes[] = "abcdefch".getBytes(StandardCharsets.UTF_8); Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("legacy", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.Adler32VerifyingIndexOutput(o, expectedString, 8); out.writeBytes(corruptBytes, 0, bytes.length); @@ -73,33 +73,33 @@ public class LegacyVerificationTests extends ESTestCase { // expected exception } out.close(); - + try { out.verify(); fail(); } catch (CorruptIndexException e) { // expected exception } - + dir.close(); } - + public void testLengthOnlyOneByte() throws Exception { Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("oneByte", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.LengthVerifyingIndexOutput(o, 1); out.writeByte((byte) 3); out.verify(); out.close(); out.verify(); - + dir.close(); } - + public void testLengthOnlyCorrupt() throws Exception { Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("oneByte", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.LengthVerifyingIndexOutput(o, 2); out.writeByte((byte) 3); @@ -109,16 +109,16 @@ public class LegacyVerificationTests extends ESTestCase { } catch (CorruptIndexException expected) { // expected exception } - + out.close(); - + try { out.verify(); fail(); } catch (CorruptIndexException expected) { // expected exception } - + dir.close(); } } diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 1e1e9487668..acaa1cf8b5d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -24,9 +24,35 @@ import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.SegmentInfoFormat; import org.apache.lucene.codecs.lucene50.Lucene50SegmentInfoFormat; import org.apache.lucene.codecs.lucene54.Lucene54Codec; -import 
org.apache.lucene.document.*; -import org.apache.lucene.index.*; -import org.apache.lucene.store.*; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexFormatTooNewException; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexNotFoundException; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.NoDeletionPolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.TestUtil; @@ -57,14 +83,29 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.Adler32; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class StoreTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java index a29cc6cf8d0..b021f3252d6 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.IndexSettingsModule; -import java.io.IOException; import java.nio.file.Path; /** diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 
8b3294c15b8..3173f7c5dc9 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -57,14 +57,29 @@ import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** * diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 818937c511e..8abe19ffbb6 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -43,9 +43,15 @@ import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; /** diff --git a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index 792f14bce1e..488472c26df 100644 --- a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -25,13 +25,12 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 968266f00c3..ec182a69890 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -19,14 +19,13 @@ package org.elasticsearch.indices; -import org.apache.lucene.analysis.hunspell.Dictionary; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.TermQueryParser; import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; public class IndicesModuleTests extends ModuleTestCase { diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java index 7034d5b439f..2b19b01f2c4 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java @@ -20,9 +20,6 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.util.StopwordAnalyzerBase; -import org.apache.lucene.util.Version; - -import java.io.Reader; public class DummyAnalyzer extends StopwordAnalyzerBase { diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java index c25b20699aa..83295e9548a 100644 --- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.indices.cache.query; -import java.io.IOException; - import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -42,6 +40,8 @@ import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + public class IndicesQueryCacheTests extends ESTestCase { private static class DummyQuery extends Query { diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 9ff0df4d390..4fdd76dae8d 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -20,9 +20,9 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import 
org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java index fdabaf6b5a8..485ec020c3f 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java @@ -20,8 +20,6 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.engine.Engine; @@ -32,9 +30,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.ESIntegTestCase.client; -import static org.elasticsearch.test.ESTestCase.randomBoolean; - /** Utils for SyncedFlush */ public class SyncedFlushUtil { diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index ed4b95c03d8..32c9d3ed621 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -42,10 +42,19 @@ import java.util.Map; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; @ClusterScope(randomDynamicTemplates = false) public class UpdateMappingIntegrationIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java index 6d8dda3afb9..83c7be0374b 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java @@ -27,7 +27,12 @@ import 
org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java index 212d7ecbb7b..033b3bb75ca 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.indices.memory.breaker; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 2b8f5ea6388..dc61d4bc5fe 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 75a2b14060b..707fbe0e02e 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -34,9 +34,9 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.index.Index; diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java index 8b23354ebb8..4c1a6420bfd 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.recovery.RecoveryState.*; +import org.elasticsearch.indices.recovery.RecoveryState.File; +import org.elasticsearch.indices.recovery.RecoveryState.Index; +import org.elasticsearch.indices.recovery.RecoveryState.Stage; +import org.elasticsearch.indices.recovery.RecoveryState.Timer; +import org.elasticsearch.indices.recovery.RecoveryState.Translog; +import org.elasticsearch.indices.recovery.RecoveryState.Type; +import org.elasticsearch.indices.recovery.RecoveryState.VerifyIndex; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -37,7 +43,14 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class RecoveryStateTests extends ESTestCase { abstract class Streamer extends Thread { diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index efb94aff31e..53a93fd0bd1 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -20,8 +20,8 @@ package org.elasticsearch.indices.settings; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 2e73a466677..4bf752886c9 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -56,12 +56,19 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import 
static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; /** */ diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index af9cfeb94c1..9522b79ea11 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -20,11 +20,11 @@ package org.elasticsearch.indices.state; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.logging.ESLogger; @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; -import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException; import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.common.settings.Settings.settingsBuilder; diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 4bdd972ea9c..e40e1c03de1 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -23,8 +23,8 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -49,9 +49,9 @@ import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.EnumSet; diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 948b76b963d..9c9cb01e597 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ 
-20,14 +20,20 @@ package org.elasticsearch.indices.store; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; @@ -35,7 +41,6 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -50,7 +55,11 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.nio.file.Files; diff --git a/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java index 62bac50b0a1..5ca4a99ac1a 100644 --- a/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.warmer.IndexWarmersMetaData; diff --git a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java index 8470020f823..dbbf3bf7247 100644 --- a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - 
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 0d59341f1c9..5d482edafd1 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -22,7 +22,15 @@ package org.elasticsearch.monitor.os; import org.apache.lucene.util.Constants; import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class OsProbeTests extends ESTestCase { OsProbe probe = OsProbe.getInstance(); diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 9236a16dcbd..4dda068ddd6 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -35,10 +35,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class InternalSettingsPreparerTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index 6b23bb09f24..93ba861dca0 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -31,7 +31,9 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import java.util.List; import static org.elasticsearch.client.Requests.nodesInfoRequest; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; /** * diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index f627e0217e6..be1acb1218d 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.SearchResponse; diff --git a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java index db660695843..6a0485133a2 100644 --- a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java @@ -21,11 +21,11 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; import org.elasticsearch.action.percolate.PercolateRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 000365f6a20..deaff46f27b 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -23,12 +23,9 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.Properties; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java index 83fa3e21295..4b1645a4ec6 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java +++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java @@ -21,7 +21,13 @@ package org.elasticsearch.plugins.responseheader; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; public class TestResponseHeaderRestAction extends BaseRestHandler { diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 7095639eafc..bdac3dad8e6 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -44,7 +44,10 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF 
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 541911ce4e0..6542a8ab1c6 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.recovery; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.procedures.IntProcedure; - import org.apache.lucene.index.IndexFileNames; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; diff --git a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index 60a14abac7c..bfaf961ee21 100644 --- a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.query.QueryBuilders; diff --git a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java index e60a120ff18..065b99ea5ae 100644 --- a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java @@ -27,7 +27,9 @@ import java.util.Locale; import java.util.Map; import java.util.regex.Pattern; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; /** * diff --git a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index a5b7da7796f..7e59253ed97 100644 --- a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -38,7 +38,9 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static 
org.hamcrest.Matchers.nullValue; public class SimpleRoutingIT extends ESIntegTestCase { @@ -46,7 +48,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { protected int minimumNumberOfShards() { return 2; } - + public void testSimpleCrudRouting() throws Exception { createIndex("test"); ensureGreen(); @@ -87,7 +89,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - + public void testSimpleSearchRouting() { createIndex("test"); ensureGreen(); @@ -153,7 +155,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareSearch().setSize(0).setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l)); } } - + public void testRequiredRoutingMapping() throws Exception { client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) @@ -214,7 +216,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false)); } } - + public void testRequiredRoutingWithPathMapping() throws Exception { client().admin().indices().prepareCreate("test") .addAlias(new Alias("alias")) @@ -253,7 +255,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - + public void testRequiredRoutingWithPathMappingBulk() throws Exception { client().admin().indices().prepareCreate("test") .addAlias(new Alias("alias")) @@ -314,9 +316,9 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - + public void testRequiredRoutingWithPathNumericType() throws Exception { - + client().admin().indices().prepareCreate("test") .addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") @@ -345,7 +347,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - + public void testRequiredRoutingMapping_variousAPIs() throws Exception { client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 0afd72dab2b..0f00c2dd58d 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -33,7 +33,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import static java.util.Collections.singleton; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.util.set.Sets.newHashSet; diff --git 
a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index ab325e9e0c9..3c939e7e91a 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -36,10 +36,8 @@ import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index fb3de6b0faa..8c18d1dd74b 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -20,8 +20,12 @@ package org.elasticsearch.search; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.index.*; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 376e8578e2e..15313095650 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.search; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.highlight.CustomHighlighter; import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.PlainHighlighter; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java index 5154dcc39e1..782ac3225f5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.IntIntMap; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java index 9d83428038d..ee19f14293a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.IntIntMap; - 
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.missing.Missing; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index 540420c21bc..e0c7d2352c3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -18,14 +18,12 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.children.Children; import org.elasticsearch.search.aggregations.bucket.terms.Terms; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 97cd659a1d9..8a4aeb6ed49 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -35,8 +35,8 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index eed080071bb..4fbbef8a58d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntMap; import com.carrotsearch.hppc.cursors.ObjectIntCursor; - import org.apache.lucene.util.GeoHashUtils; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 0dce2000f1b..607b6902f8c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -22,8 +22,6 @@ package org.elasticsearch.search.aggregations.bucket; import 
org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.junit.Ignore; import java.util.ArrayList; import java.util.Arrays; @@ -31,7 +29,6 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 1f77ca5bb64..6c1e7dfcabf 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -41,24 +41,38 @@ import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceSco import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; -import org.junit.Test; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; 
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
index d20dff0ae05..0fe9113e8f8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
@@ -49,10 +49,19 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 /**
  *
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
index 6419e9dcac3..67c49a799b7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
@@ -36,7 +36,13 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.geoBounds;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.sameInstance;
 /**
  *
@@ -259,4 +265,4 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
         assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE));
         assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE));
     }
-}
\ No newline at end of file
+}
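A note on the GeoBoundsIT hunk above: the closing `-}` plus `\ No newline at end of file` plus `+}` is how git records that the file previously ended without a terminating line feed and now gains one. POSIX defines a line as ending in a newline, so the old last line was formally incomplete; the same fix recurs later in this patch for SearchWhileCreatingIndexIT.java and RandomQueryGenerator.java. The standalone helper below (hypothetical, not part of this patch) is one way to spot such files:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Hypothetical helper, not part of this patch: reports source files whose
// last byte is not the '\n' that the hunk above adds to GeoBoundsIT.java.
public final class TrailingNewlineCheck {

    static boolean endsWithNewline(Path file) throws IOException {
        byte[] bytes = Files.readAllBytes(file);
        return bytes.length > 0 && bytes[bytes.length - 1] == '\n';
    }

    public static void main(String[] args) throws IOException {
        for (String arg : args) {
            Path file = Paths.get(arg);
            if (endsWithNewline(file) == false) {
                System.out.println(file + " has no trailing newline");
            }
        }
    }
}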
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
index c4611546493..903c1bab356 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
@@ -22,8 +22,14 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.*;
+import org.elasticsearch.script.CompiledScript;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.LeafSearchScript;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineService;
+import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
 import org.elasticsearch.search.lookup.LeafSearchLookup;
@@ -31,7 +37,10 @@ import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESIntegTestCase;
 import java.io.IOException;
-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java
index 9a00297c57e..e58899807ab 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
 import com.carrotsearch.hppc.BitMixer;
 import com.carrotsearch.hppc.IntHashSet;
-
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.test.ESTestCase;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
index 207b626409b..90d4437fcea 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
@@ -57,7 +57,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
 import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative;
 import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg;
-import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java
index 7a359ceb39d..3b4e84f8e7c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.support;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
 import org.apache.lucene.index.RandomAccessOrds;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.util.BytesRef;
diff --git
a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index 18e93656562..e6b0981f544 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.script.LeafSearchScript; diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 8d401e5e2e6..28874d2e2a4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.basic; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -93,4 +93,4 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase { cluster().wipeIndices("test"); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 4586612b007..cbc7f93ff5a 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -22,12 +22,12 @@ package org.elasticsearch.search.basic; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.GeohashCellQuery; diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 63c142f1e74..45be05c10d8 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -21,10 +21,10 @@ package org.elasticsearch.search.child; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index dfa28947ca2..cea8df63a4b 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -23,7 +23,6 @@ import com.spatial4j.core.context.SpatialContext; import com.spatial4j.core.distance.DistanceUtils; import com.spatial4j.core.exception.InvalidShapeException; import com.spatial4j.core.shape.Shape; - import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.query.SpatialArgs; @@ -79,7 +78,11 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirs import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index c0cc17fc43d..1ae211bc242 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.geo; import com.spatial4j.core.shape.Rectangle; - import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 4063ec81a28..655dd82071b 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -54,6 +53,7 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static 
org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; @@ -66,7 +66,6 @@ import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 65fbbd3340d..daa996be702 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -21,10 +21,10 @@ package org.elasticsearch.search.innerhits; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.Version; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index f281eb3281f..ae163eaf4a6 100644 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -19,12 +19,12 @@ package org.elasticsearch.search.morelikethis; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 39d052400cb..61890092831 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.nested; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import 
org.elasticsearch.action.delete.DeleteResponse; @@ -29,6 +28,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java index 93e94c49b47..fa9626964e8 100644 --- a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.preference; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java index bb33364a751..271a9b5ae88 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -19,11 +19,16 @@ package org.elasticsearch.search.profile; -import org.elasticsearch.action.search.*; -import org.elasticsearch.search.SearchHit; import org.apache.lucene.util.English; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.index.query.*; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -33,7 +38,10 @@ import java.util.Map; import static org.elasticsearch.search.profile.RandomQueryGenerator.randomQueryBuilder; import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.isEmptyOrNullString; +import static org.hamcrest.Matchers.not; public class QueryProfilerIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java index fb8cd40ce52..9eb41086bed 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java @@ -21,12 +21,23 @@ package org.elasticsearch.search.profile; import org.apache.lucene.util.English; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.query.*; +import 
org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.CommonTermsQueryBuilder; +import org.elasticsearch.index.query.DisMaxQueryBuilder; +import org.elasticsearch.index.query.FuzzyQueryBuilder; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; import java.util.ArrayList; import java.util.List; -import static com.carrotsearch.randomizedtesting.RandomizedTest.*; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomFloat; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; import static org.junit.Assert.assertTrue; @@ -263,4 +274,4 @@ public class RandomQueryGenerator { return q; } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 73906b2ed83..a508883e53a 100644 --- a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -40,7 +40,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; public class ExistsIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 235438cc442..a789497235e 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; diff --git a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 720d51508f7..ac723778abb 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.scroll; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 0f5ac1a522f..e071d4374c2 100644 --- 
a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -47,11 +47,18 @@ import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -62,7 +69,15 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; @SuppressCodecs("*") // requires custom completion format public class CompletionSuggestSearchIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index ae6ec51ac36..18d6d9b99f9 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,9 +19,8 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; 
import org.apache.lucene.util.GeoHashUtils; +import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.geo.GeoPoint; @@ -30,11 +29,23 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; +import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; +import org.elasticsearch.search.suggest.completion.context.ContextBuilder; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 27a6529bdd8..15e449f786d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -23,19 +23,25 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.suggest.document.ContextSuggestField; -import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; -import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.*; +import java.util.List; +import java.util.Set; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.isIn; public class CategoryContextMappingTests extends ESSingleNodeTestCase { diff --git 
a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 60974baee84..51cd83c3c5d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.GeoHashUtils; -import org.elasticsearch.common.inject.matcher.Matchers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -30,10 +29,14 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.ContextBuilder; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.suggest.completion.CategoryContextMappingTests.assertContextSuggestFields; diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 8fde9bbf330..1ef3fddcf8e 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -20,7 +20,12 @@ package org.elasticsearch.snapshots; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.PendingClusterTask; diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 7946116f571..f5ca9211dff 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; - import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import 
org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 57a22c0dd15..669527fd5f3 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStage; @@ -38,8 +37,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +54,6 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -79,7 +77,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.client.Requests.getSnapshotsRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java index 360f549ecbd..eb069d4721c 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java @@ -19,14 +19,13 @@ package org.elasticsearch.snapshots; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus; import 
org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.io.FileSystemUtils; diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index b1e89cdf268..55744697b4d 100644 --- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -29,7 +29,6 @@ import com.spatial4j.core.shape.impl.Range; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; import org.elasticsearch.common.geo.builders.LineStringBuilder; diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 595e84f41b6..3400f9637ff 100644 --- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -25,8 +25,13 @@ import com.spatial4j.core.shape.impl.GeoCircle; import com.spatial4j.core.shape.impl.RectangleImpl; import com.spatial4j.core.shape.jts.JtsGeometry; import com.spatial4j.core.shape.jts.JtsPoint; -import com.vividsolutions.jts.geom.*; -import org.elasticsearch.ElasticsearchParseException; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.MultiLineString; +import com.vividsolutions.jts.geom.MultiPoint; +import com.vividsolutions.jts.geom.MultiPolygon; +import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -58,29 +63,29 @@ public class ElasticsearchGeoAssertions { } return top; } - + private static int prev(int top, Coordinate...points) { for (int i = 1; i < points.length; i++) { int p = (top + points.length - i) % points.length; if((points[p].x != points[top].x) || (points[p].y != points[top].y)) { return p; - } + } } return -1; } - + private static int next(int top, Coordinate...points) { for (int i = 1; i < points.length; i++) { int n = (top + i) % points.length; if((points[n].x != points[top].x) || (points[n].y != points[top].y)) { return n; - } + } } return -1; } - + private static Coordinate[] fixedOrderedRing(List coordinates, boolean direction) { - return fixedOrderedRing(coordinates.toArray(new Coordinate[coordinates.size()]), direction); + return fixedOrderedRing(coordinates.toArray(new Coordinate[coordinates.size()]), direction); } private static Coordinate[] fixedOrderedRing(Coordinate[] points, boolean direction) { @@ -105,7 +110,7 @@ public class ElasticsearchGeoAssertions { return points; } } - + } public static void assertEquals(Coordinate c1, Coordinate c2) { @@ -115,7 +120,7 @@ public class ElasticsearchGeoAssertions { private 
static boolean isRing(Coordinate[] c) { return (c[0].x == c[c.length-1].x) && (c[0].y == c[c.length-1].y); } - + public static void assertEquals(Coordinate[] c1, Coordinate[] c2) { Assert.assertEquals(c1.length, c2.length); @@ -234,7 +239,7 @@ public class ElasticsearchGeoAssertions { public static void assertMultiLineString(Shape shape) { assert(unwrap(shape) instanceof MultiLineString): "expected MultiLineString but found " + unwrap(shape).getClass().getName(); } - + public static void assertDistance(String geohash1, String geohash2, Matcher match) { GeoPoint p1 = new GeoPoint(geohash1); GeoPoint p2 = new GeoPoint(geohash2); @@ -244,7 +249,7 @@ public class ElasticsearchGeoAssertions { public static void assertDistance(double lat1, double lon1, double lat2, double lon2, Matcher match) { assertThat(distance(lat1, lon1, lat2, lon2), match); } - + private static double distance(double lat1, double lon1, double lat2, double lon2) { return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT); } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 3d57c1d5206..e655f452688 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.threadpool; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 56b2a03bad1..e1b1c4451c9 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -36,7 +36,12 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; /** */ diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 6599412834d..a5b6e08de3f 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -21,10 +21,10 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; 
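The AbstractSimpleTransportTestCase hunk just above and the NettySizeHeaderFrameDecoderTests hunk that follows both move the `org.elasticsearch.common.settings.ClusterSettings` import below the `org.elasticsearch.common.io.stream` group: with the wildcard imports gone, every import block in this series is kept in plain lexicographic order. The effect is nothing more than a string sort, as this small self-contained sketch illustrates (class and method names are invented for illustration):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustration only, not part of this patch: sorting an import block
// lexicographically yields the ordering these hunks establish.
public final class ImportBlockSorter {

    static List<String> sorted(List<String> importLines) {
        List<String> copy = new ArrayList<>(importLines);
        Collections.sort(copy);
        return copy;
    }

    public static void main(String[] args) {
        List<String> block = Arrays.asList(
            "import org.elasticsearch.common.settings.ClusterSettings;",
            "import org.elasticsearch.common.io.stream.NamedWriteableRegistry;",
            "import org.elasticsearch.common.network.NetworkService;",
            "import org.elasticsearch.common.settings.Settings;");
        // Prints the io.stream and network imports first, then the two
        // settings imports, matching the "+" lines in the hunks above and below.
        sorted(block).forEach(System.out::println);
    }
}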
diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
index 7a3fd88f93b..33d0f6ef528 100644
--- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
@@ -21,9 +21,9 @@ package org.elasticsearch.transport;
 import org.elasticsearch.Version;
 import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.util.BigArrays;
diff --git a/distribution/build.gradle b/distribution/build.gradle
index 4da164131f3..fcf22ff48dc 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -17,15 +17,14 @@
  * under the License.
  */
+
 import org.apache.tools.ant.filters.FixCrLfFilter
 import org.apache.tools.ant.taskdefs.condition.Os
+import org.elasticsearch.gradle.EmptyDirTask
+import org.elasticsearch.gradle.MavenFilteringHack
 import org.elasticsearch.gradle.precommit.DependencyLicensesTask
 import org.elasticsearch.gradle.precommit.UpdateShasTask
 import org.elasticsearch.gradle.test.RunTask
-import org.elasticsearch.gradle.EmptyDirTask
-import org.elasticsearch.gradle.MavenFilteringHack
-import org.gradle.api.InvalidUserDataException
-import org.gradle.internal.nativeintegration.filesystem.Chmod
 // for deb/rpm
 buildscript {
diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java
index fd12fd2e519..5537bbfa646 100644
--- a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java
+++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
index fd12fd2e519..5537bbfa646 100644
--- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
+++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java
index fd12fd2e519..5537bbfa646 100644
--- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java
+++ b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java
index fd12fd2e519..5537bbfa646 100644
--- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java
+++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
index fd12fd2e519..5537bbfa646 100644
--- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
+++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
index 0fa83d92d76..a92e8d9dd38 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
@@ -25,7 +25,6 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.search.MultiValueMode;
 import java.io.IOException;
 import java.util.Map;
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java
index f7198fc0ae2..3ed2ed1f0b5 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java
@@ -19,14 +19,14 @@ package org.elasticsearch.script.expression;
-import java.util.Calendar;
-import java.util.Locale;
-import java.util.TimeZone;
-
 import org.apache.lucene.queries.function.ValueSource;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.search.MultiValueMode;
+import java.util.Calendar;
+import java.util.Locale;
+import java.util.TimeZone;
+
 class DateMethodFunctionValues extends FieldDataFunctionValues {
     private final int calendarType;
     private final Calendar calendar;
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
index 522b546656d..8bbf6251fc3 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
@@ -19,18 +19,17 @@ package org.elasticsearch.script.expression;
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.queries.function.FunctionValues;
-
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.search.MultiValueMode;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
 class DateMethodValueSource extends FieldDataValueSource {
     protected final String methodName;
diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
index 89a5be7ff1c..b4c0106abbe 100644
--- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
@@ -21,15 +21,12 @@ package org.elasticsearch.script.expression;
 import org.apache.lucene.expressions.Expression;
 import org.apache.lucene.expressions.js.JavascriptCompiler;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.common.lucene.search.function.CombineFunction;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
 import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
@@ -97,7 +94,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
         assertEquals(1, rsp.getHits().getTotalHits());
         assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
     }
-
+
     public void testFunction() throws Exception {
         createIndex("test");
         ensureGreen("test");
diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
index 1ce5a2ab761..98ed5695973 100644
--- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
+++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
@@ -44,7 +44,14 @@ import org.elasticsearch.common.hash.MessageDigests;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.script.*;
+import org.elasticsearch.script.ClassPermission;
+import org.elasticsearch.script.CompiledScript;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.LeafSearchScript;
+import org.elasticsearch.script.ScoreAccessor;
+import org.elasticsearch.script.ScriptEngineService;
+import org.elasticsearch.script.ScriptException;
+import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.lookup.LeafSearchLookup;
 import org.elasticsearch.search.lookup.SearchLookup;
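The GroovyScriptEngineService hunk above expands `import org.elasticsearch.script.*;` into the eight classes the file actually uses, which makes the engine's dependency surface explicit and keeps future diffs reviewable. A toy detector for leftover wildcard imports could look like the sketch below; it is illustrative only, not the check the build itself runs:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.regex.Pattern;

// Toy check, not part of this patch: flags lines like the removed
// "import org.elasticsearch.script.*;" so they can be expanded by hand.
public final class StarImportCheck {

    private static final Pattern STAR_IMPORT =
        Pattern.compile("^import\\s+(?:static\\s+)?[\\w.]+\\.\\*;\\s*$");

    public static void main(String[] args) throws IOException {
        Path source = Paths.get(args[0]);
        List<String> lines = Files.readAllLines(source);
        for (int i = 0; i < lines.size(); i++) {
            if (STAR_IMPORT.matcher(lines.get(i)).matches()) {
                System.out.println(source + ":" + (i + 1) + ": " + lines.get(i).trim());
            }
        }
    }
}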
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java index d893b2767ca..2059b069ab1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 728a932d2b5..9b2e0041462 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -78,14 +78,11 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 74bef40b713..584a8d2c284 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.IntHashSet; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -78,7 +77,7 @@ public class EquivalenceTests extends ESIntegTestCase { protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(GroovyPlugin.class); } - + // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported // Duel with filters public void testRandomRanges() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java index dd3d2e99fcd..b8c6f6dcc7f 100644 ---
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index de24124669a..a8f78c62c77 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -65,7 +64,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(GroovyPlugin.class); } - + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java index 42141e6afb0..7a6ffa5edf0 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java @@ -38,11 +38,21 @@ import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.*; -import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.nullValue; public class RandomScoreFunctionTests extends ESIntegTestCase { @@ -50,7 +60,7 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { protected Collection<Class<? extends Plugin>> nodePlugins() { return
Collections.singleton(GroovyPlugin.class); } - + public void testConsistentHitsWithSameSeed() throws Exception { createIndex("test"); ensureGreen(); // make sure we are done otherwise preference could change? @@ -244,7 +254,7 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { } } } - + public void testSeeds() throws Exception { createIndex("test"); ensureGreen(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index de9b5b5f4f5..8d959022412 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -48,24 +47,57 @@ import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.*; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.GeoDistanceSortBuilder; +import org.elasticsearch.search.sort.ScriptSortBuilder; +import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.ExecutionException; import static org.apache.lucene.util.GeoUtils.TOLERANCE; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; import static org.elasticsearch.search.sort.SortBuilders.fieldSort; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** * diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 8d9279ca0dd..446a4dfd562 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.script.groovy; +import groovy.lang.MissingPropertyException; import org.apache.lucene.util.Constants; import org.codehaus.groovy.control.MultipleCompilationErrorsException; import org.elasticsearch.common.settings.Settings; @@ -27,8 +28,6 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import groovy.lang.MissingPropertyException; - import java.nio.file.Path; import java.security.PrivilegedActionException; import java.util.AbstractMap; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index 78fc6571f12..f43856cb7b5 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -19,7 +19,6 @@ package org.elasticsearch.script.mustache; import com.github.mustachejava.Mustache; - import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index 87cc51c2ec2..4b3d3f3ff98 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -32,7 +31,6 @@ 
 import org.elasticsearch.script.Template;
 import org.elasticsearch.script.mustache.MustachePlugin;
 import org.elasticsearch.script.mustache.MustacheScriptEngineService;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.rest.support.FileUtils;
 import java.nio.file.Files;
 import java.nio.file.Path;
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
index 76c867802a9..d8cf7732378 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.script.mustache;
 import com.github.mustachejava.DefaultMustacheFactory;
 import com.github.mustachejava.Mustache;
 import com.github.mustachejava.MustacheFactory;
-
 import org.elasticsearch.test.ESTestCase;
 import java.io.StringReader;
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java
index 674ae8b8f12..43165555430 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java
@@ -19,7 +19,6 @@ package org.elasticsearch.index.analysis;
 import com.ibm.icu.text.Collator;
 import com.ibm.icu.text.RawCollationKey;
-
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -36,7 +35,7 @@ import java.io.IOException;
 * <p>
 * WARNING: Make sure you use exactly the same Collator at
 * index and query time -- CollationKeys are only comparable when produced by
- * the same Collator. {@link com.ibm.icu.text.RuleBasedCollator}s are 
+ * the same Collator. {@link com.ibm.icu.text.RuleBasedCollator}s are
 * independently versioned, so it is safe to search against stored
 * CollationKeys if the following are exactly the same (best practice is
 * to store this information with the index and check that they remain the
@@ -49,11 +48,11 @@ import java.io.IOException;
 * <li>
 * The collation strength used - see {@link Collator#setStrength(int)}
 * </li>
- * </ul> 
+ * </ul>
 * <p>
 * CollationKeys generated by ICU Collators are not compatible with those
- * generated by java.text.Collators. Specifically, if you use 
- * ICUCollationKeyFilter to generate index terms, do not use 
+ * generated by java.text.Collators. Specifically, if you use
+ * ICUCollationKeyFilter to generate index terms, do not use
 * {@code CollationKeyFilter} on the query side, or vice versa.
 * </p>
 *
@@ -74,7 +73,7 @@ public final class ICUCollationKeyFilter extends TokenFilter {
   private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
   /**
-   * 
+   *
    * @param input Source token stream
    * @param collator CollationKey generator
    */
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
index 51243856a1f..b31502cdd7d 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
@@ -19,19 +19,18 @@ package org.elasticsearch.index.analysis;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.icu.ICUFoldingFilter;
-import org.elasticsearch.common.settings.Settings;
-
 import com.ibm.icu.text.FilteredNormalizer2;
 import com.ibm.icu.text.Normalizer2;
 import com.ibm.icu.text.UnicodeSet;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.icu.ICUFoldingFilter;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
 /**
- * Uses the {@link org.apache.lucene.analysis.icu.ICUFoldingFilter}. 
+ * Uses the {@link org.apache.lucene.analysis.icu.ICUFoldingFilter}.
 * Applies foldings from UTR#30 Character Foldings.
 * <p>
      * Can be filtered to handle certain characters in a specified way (see http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html) @@ -54,7 +53,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory { // The ICUFoldingFilter is in fact implemented as a ICUNormalizer2Filter. // ICUFoldingFilter lacks a constructor for adding filtering so we implemement it here - if (unicodeSetFilter != null) { + if (unicodeSetFilter != null) { Normalizer2 base = Normalizer2.getInstance( ICUFoldingFilter.class.getResourceAsStream("utr30.nrm"), "utr30", Normalizer2.Mode.COMPOSE); @@ -62,7 +61,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory { unicodeSet.freeze(); Normalizer2 filtered = new FilteredNormalizer2(base, unicodeSet); - return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, filtered); + return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, filtered); } else { return new ICUFoldingFilter(tokenStream); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java index 6f830b29d15..4833e887153 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java index 1d5136f60e1..6ecdf3888e9 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java @@ -23,7 +23,6 @@ import com.ibm.icu.text.Transliterator; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.icu.ICUTransformFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java index 82be3c66159..38b5da309be 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java @@ -17,7 +17,7 @@ package org.elasticsearch.index.analysis; * limitations under the License. 
 */
-import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; // javadoc
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 /**
 * Provides support for converting byte sequences to Strings and back again.
@@ -26,7 +26,7 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; // ja
 * The Strings are constructed using a Base 8000h encoding of the original
 * binary data - each char of an encoded String represents a 15-bit chunk
 * from the byte sequence. Base 8000h was chosen because it allows for all
- * lower 15 bits of char to be used without restriction; the surrogate range 
+ * lower 15 bits of char to be used without restriction; the surrogate range
 * [U+D8000-U+DFFF] does not represent valid chars, and would require
 * complicated handling to avoid them and allow use of char's high bit.
 * <p>
      @@ -61,7 +61,7 @@ public final class IndexableBinaryStringTools { /** * Returns the number of chars required to encode the given bytes. - * + * * @param inputArray byte sequence to be encoded * @param inputOffset initial offset into inputArray * @param inputLength number of bytes in inputArray @@ -75,7 +75,7 @@ public final class IndexableBinaryStringTools { /** * Returns the number of bytes required to decode the given char sequence. - * + * * @param encoded char sequence to be decoded * @param offset initial offset * @param length number of characters @@ -97,7 +97,7 @@ public final class IndexableBinaryStringTools { * Encodes the input byte sequence into the output char sequence. Before * calling this method, ensure that the output array has sufficient * capacity by calling {@link #getEncodedLength(byte[], int, int)}. - * + * * @param inputArray byte sequence to be encoded * @param inputOffset initial offset into inputArray * @param inputLength number of bytes in inputArray @@ -151,7 +151,7 @@ public final class IndexableBinaryStringTools { * Decodes the input char sequence into the output byte sequence. Before * calling this method, ensure that the output array has sufficient capacity * by calling {@link #getDecodedLength(char[], int, int)}. - * + * * @param inputArray char sequence to be decoded * @param inputOffset initial offset into inputArray * @param inputLength number of chars in inputArray @@ -233,7 +233,7 @@ public final class IndexableBinaryStringTools { this.finalShift = finalShift; this.finalMask = (short)((short)0xFF >>> finalShift); if (finalShift != 0) { - advanceBytes = 1; + advanceBytes = 1; } } } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java index 47c2f8f051a..46b8d530f5f 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java @@ -19,7 +19,12 @@ package org.elasticsearch.plugin.analysis.icu; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.IcuCollationTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuFoldingTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuNormalizerCharFilterFactory; +import org.elasticsearch.index.analysis.IcuNormalizerTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuTokenizerFactory; +import org.elasticsearch.index.analysis.IcuTransformTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.Plugin; diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 9e59b8e42c3..33c1f337dbd 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Collator; import com.ibm.icu.text.RuleBasedCollator; import com.ibm.icu.util.ULocale; - import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; 
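As a quick illustration of the encode/decode contract described in the IndexableBinaryStringTools javadoc above, here is a minimal stand-alone sketch. It is not part of the patch; it assumes the six-argument encode/decode signatures implied by the javadoc hunks (input array/offset/length plus output array/offset/length), and the class name and sample bytes are illustrative only.

import java.util.Arrays;
import org.elasticsearch.index.analysis.IndexableBinaryStringTools;

public class Base8000hRoundTripExample {
    public static void main(String[] args) {
        byte[] original = new byte[] { 0x01, 0x7f, (byte) 0x80, (byte) 0xff };

        // Each encoded char carries a 15-bit chunk of the input, so the encoded
        // form needs roughly ceil(8 * n / 15) chars; getEncodedLength performs
        // the exact bookkeeping, as the javadoc instructs callers to do.
        int encodedLength = IndexableBinaryStringTools.getEncodedLength(original, 0, original.length);
        char[] encoded = new char[encodedLength];
        IndexableBinaryStringTools.encode(original, 0, original.length, encoded, 0, encodedLength);

        // Decoding mirrors the encode side: size the output buffer via
        // getDecodedLength, then decode back into the byte sequence.
        int decodedLength = IndexableBinaryStringTools.getDecodedLength(encoded, 0, encodedLength);
        byte[] decoded = new byte[decodedLength];
        IndexableBinaryStringTools.decode(encoded, 0, encodedLength, decoded, 0, decodedLength);

        System.out.println(Arrays.equals(original, decoded)); // expected: true
    }
}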
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index bd2f959bf9c..acdbd9d4dfc 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; - import org.apache.lucene.analysis.CharFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java index f363cf0c15c..e191d78198f 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseBaseFormFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java index 7d25ca03fdb..ebebdcb6bba 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseKatakanaStemFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java index 1d4ecc2c33d..59d1088fd1b 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseReadingFormFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git 
a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java index fde7d3d5964..6c0a15f2e39 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java @@ -19,7 +19,6 @@ package org.elasticsearch.plugin.analysis.kuromoji; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.index.analysis.JapaneseStopTokenFilterFactory; import org.elasticsearch.index.analysis.KuromojiAnalyzerProvider; import org.elasticsearch.index.analysis.KuromojiBaseFormFilterFactory; @@ -28,6 +27,7 @@ import org.elasticsearch.index.analysis.KuromojiKatakanaStemmerFactory; import org.elasticsearch.index.analysis.KuromojiPartOfSpeechFilterFactory; import org.elasticsearch.index.analysis.KuromojiReadingFormFilterFactory; import org.elasticsearch.index.analysis.KuromojiTokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.Plugin; /** diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 0942be5399a..3adb8202833 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -46,7 +46,10 @@ import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Path; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; /** */ diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index 37f7e0cd214..9374410765d 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -20,7 +20,13 @@ package org.elasticsearch.index.analysis; import org.apache.commons.codec.Encoder; -import org.apache.commons.codec.language.*; +import org.apache.commons.codec.language.Caverphone1; +import org.apache.commons.codec.language.Caverphone2; +import org.apache.commons.codec.language.ColognePhonetic; +import org.apache.commons.codec.language.DaitchMokotoffSoundex; +import org.apache.commons.codec.language.Metaphone; +import org.apache.commons.codec.language.RefinedSoundex; +import org.apache.commons.codec.language.Soundex; import org.apache.commons.codec.language.bm.Languages.LanguageSet; import org.apache.commons.codec.language.bm.NameType; import org.apache.commons.codec.language.bm.PhoneticEngine; @@ -61,7 +67,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { this.maxcodelength = 0; this.replace = settings.getAsBoolean("replace", true); // weird, encoder is null at last step in SimplePhoneticAnalysisTests, so we set it to metaphone as default - 
String encodername = settings.get("encoder", "metaphone"); + String encodername = settings.get("encoder", "metaphone"); if ("metaphone".equalsIgnoreCase(encodername)) { this.encoder = new Metaphone(); } else if ("soundex".equalsIgnoreCase(encodername)) { diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java index e70722c9b48..57195062cdd 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java @@ -22,7 +22,12 @@ package org.elasticsearch.index.analysis.phonetic; import org.apache.commons.codec.EncoderException; import org.apache.commons.codec.StringEncoder; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -36,7 +41,7 @@ import java.util.regex.Pattern; * Java-Programmierung von Jörg Reiher * * mit Änderungen von Jörg Prante - * + * */ public class KoelnerPhonetik implements StringEncoder { @@ -59,7 +64,7 @@ public class KoelnerPhonetik implements StringEncoder { public KoelnerPhonetik() { init(); } - + public KoelnerPhonetik(boolean useOnlyPrimaryCode) { this(); this.primary = useOnlyPrimaryCode; } /** * Get variation patterns - * + * @return string array of variations */ protected String[] getPatterns() { @@ -99,7 +104,7 @@ public class KoelnerPhonetik implements StringEncoder { public Object encode(Object str) throws EncoderException { return encode((String) str); } - + @Override public String encode(String str) throws EncoderException { if (str == null) return null; @@ -114,14 +119,14 @@ public class KoelnerPhonetik implements StringEncoder { return sb.toString(); } - + private void init() { this.variationsPatterns = new Pattern[getPatterns().length]; for (int i = 0; i < getPatterns().length; i++) { this.variationsPatterns[i] = Pattern.compile(getPatterns()[i]); } } - + private String[] code(String str) { List<String> parts = partition(str); String[] codes = new String[parts.size()]; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java index 1daaa4b0a3d..22fcf238725 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java index e0f9f556896..3f08f2e458c 100644
--- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java index 7dade32f0e8..9d387296152 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java index 0ee789f66e9..afc7d527a6c 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis.pl; -import org.apache.lucene.analysis.pl.PolishAnalyzer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.pl.PolishAnalyzer; import org.apache.lucene.analysis.stempel.StempelFilter; import org.apache.lucene.analysis.stempel.StempelStemmer; import org.egothor.stemmer.Trie; diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java index df57aca1668..9fd42ae513a 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java @@ -27,7 +27,14 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; +import 
org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; @@ -42,7 +49,10 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java index 26406e3811c..39221ee6904 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java @@ -36,7 +36,10 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; -import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.*; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PASSWORD; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PATH; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_TYPE; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.SUBSCRIPTION_ID; /** * diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java index c4a1837fdaf..a2851e70969 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java @@ -24,7 +24,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.*; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PASSWORD; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PATH; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_TYPE; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.SUBSCRIPTION_ID; public class AzureComputeSettingsFilter extends AbstractComponent { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java index 9f58b0bbb18..690ab623bd9 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java @@ -19,8 +19,11 @@ package 
org.elasticsearch.discovery.azure; -import com.microsoft.windowsazure.management.compute.models.*; - +import com.microsoft.windowsazure.management.compute.models.DeploymentSlot; +import com.microsoft.windowsazure.management.compute.models.DeploymentStatus; +import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; +import com.microsoft.windowsazure.management.compute.models.InstanceEndpoint; +import com.microsoft.windowsazure.management.compute.models.RoleInstance; import org.elasticsearch.Version; import org.elasticsearch.cloud.azure.AzureServiceDisableException; import org.elasticsearch.cloud.azure.AzureServiceRemoteException; @@ -41,8 +44,8 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Locale; import java.util.List; +import java.util.Locale; /** * diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index b6306e6209c..349a513455c 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -23,7 +23,12 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.*; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSCredentialsProviderChain; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; +import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.SystemPropertiesCredentialsProvider; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.retry.RetryPolicy; import com.amazonaws.services.ec2.AmazonEC2; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index f7e70281a3d..cafbae2671f 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -21,7 +21,12 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.model.*; +import com.amazonaws.services.ec2.model.DescribeInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeInstancesResult; +import com.amazonaws.services.ec2.model.Filter; +import com.amazonaws.services.ec2.model.GroupIdentifier; +import com.amazonaws.services.ec2.model.Instance; +import com.amazonaws.services.ec2.model.Reservation; import org.elasticsearch.Version; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2; @@ -36,7 +41,14 @@ import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import 
java.util.Map; +import java.util.Set; /** * diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java index d69d939e5b4..baaeb9b1b01 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; - import org.elasticsearch.test.ESTestCase; import static org.hamcrest.CoreMatchers.is; diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 47e2554dcd4..88d87a2d8c2 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -25,11 +25,329 @@ import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ResponseMetadata; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.model.*; +import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; +import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.AllocateAddressRequest; +import com.amazonaws.services.ec2.model.AllocateAddressResult; +import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesRequest; +import com.amazonaws.services.ec2.model.AssociateAddressRequest; +import com.amazonaws.services.ec2.model.AssociateAddressResult; +import com.amazonaws.services.ec2.model.AssociateDhcpOptionsRequest; +import com.amazonaws.services.ec2.model.AssociateRouteTableRequest; +import com.amazonaws.services.ec2.model.AssociateRouteTableResult; +import com.amazonaws.services.ec2.model.AttachClassicLinkVpcRequest; +import com.amazonaws.services.ec2.model.AttachClassicLinkVpcResult; +import com.amazonaws.services.ec2.model.AttachInternetGatewayRequest; +import com.amazonaws.services.ec2.model.AttachNetworkInterfaceRequest; +import com.amazonaws.services.ec2.model.AttachNetworkInterfaceResult; +import com.amazonaws.services.ec2.model.AttachVolumeRequest; +import com.amazonaws.services.ec2.model.AttachVolumeResult; +import com.amazonaws.services.ec2.model.AttachVpnGatewayRequest; +import com.amazonaws.services.ec2.model.AttachVpnGatewayResult; +import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupEgressRequest; +import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupIngressRequest; +import com.amazonaws.services.ec2.model.BundleInstanceRequest; +import com.amazonaws.services.ec2.model.BundleInstanceResult; +import com.amazonaws.services.ec2.model.CancelBundleTaskRequest; +import com.amazonaws.services.ec2.model.CancelBundleTaskResult; +import com.amazonaws.services.ec2.model.CancelConversionTaskRequest; +import com.amazonaws.services.ec2.model.CancelExportTaskRequest; +import com.amazonaws.services.ec2.model.CancelImportTaskRequest; +import com.amazonaws.services.ec2.model.CancelImportTaskResult; +import com.amazonaws.services.ec2.model.CancelReservedInstancesListingRequest; +import com.amazonaws.services.ec2.model.CancelReservedInstancesListingResult; +import com.amazonaws.services.ec2.model.CancelSpotFleetRequestsRequest; +import 
com.amazonaws.services.ec2.model.CancelSpotFleetRequestsResult;
+import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsRequest;
+import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsResult;
+import com.amazonaws.services.ec2.model.ConfirmProductInstanceRequest;
+import com.amazonaws.services.ec2.model.ConfirmProductInstanceResult;
+import com.amazonaws.services.ec2.model.CopyImageRequest;
+import com.amazonaws.services.ec2.model.CopyImageResult;
+import com.amazonaws.services.ec2.model.CopySnapshotRequest;
+import com.amazonaws.services.ec2.model.CopySnapshotResult;
+import com.amazonaws.services.ec2.model.CreateCustomerGatewayRequest;
+import com.amazonaws.services.ec2.model.CreateCustomerGatewayResult;
+import com.amazonaws.services.ec2.model.CreateDhcpOptionsRequest;
+import com.amazonaws.services.ec2.model.CreateDhcpOptionsResult;
+import com.amazonaws.services.ec2.model.CreateFlowLogsRequest;
+import com.amazonaws.services.ec2.model.CreateFlowLogsResult;
+import com.amazonaws.services.ec2.model.CreateImageRequest;
+import com.amazonaws.services.ec2.model.CreateImageResult;
+import com.amazonaws.services.ec2.model.CreateInstanceExportTaskRequest;
+import com.amazonaws.services.ec2.model.CreateInstanceExportTaskResult;
+import com.amazonaws.services.ec2.model.CreateInternetGatewayRequest;
+import com.amazonaws.services.ec2.model.CreateInternetGatewayResult;
+import com.amazonaws.services.ec2.model.CreateKeyPairRequest;
+import com.amazonaws.services.ec2.model.CreateKeyPairResult;
+import com.amazonaws.services.ec2.model.CreateNetworkAclEntryRequest;
+import com.amazonaws.services.ec2.model.CreateNetworkAclRequest;
+import com.amazonaws.services.ec2.model.CreateNetworkAclResult;
+import com.amazonaws.services.ec2.model.CreateNetworkInterfaceRequest;
+import com.amazonaws.services.ec2.model.CreateNetworkInterfaceResult;
+import com.amazonaws.services.ec2.model.CreatePlacementGroupRequest;
+import com.amazonaws.services.ec2.model.CreateReservedInstancesListingRequest;
+import com.amazonaws.services.ec2.model.CreateReservedInstancesListingResult;
+import com.amazonaws.services.ec2.model.CreateRouteRequest;
+import com.amazonaws.services.ec2.model.CreateRouteResult;
+import com.amazonaws.services.ec2.model.CreateRouteTableRequest;
+import com.amazonaws.services.ec2.model.CreateRouteTableResult;
+import com.amazonaws.services.ec2.model.CreateSecurityGroupRequest;
+import com.amazonaws.services.ec2.model.CreateSecurityGroupResult;
+import com.amazonaws.services.ec2.model.CreateSnapshotRequest;
+import com.amazonaws.services.ec2.model.CreateSnapshotResult;
+import com.amazonaws.services.ec2.model.CreateSpotDatafeedSubscriptionRequest;
+import com.amazonaws.services.ec2.model.CreateSpotDatafeedSubscriptionResult;
+import com.amazonaws.services.ec2.model.CreateSubnetRequest;
+import com.amazonaws.services.ec2.model.CreateSubnetResult;
+import com.amazonaws.services.ec2.model.CreateTagsRequest;
+import com.amazonaws.services.ec2.model.CreateVolumeRequest;
+import com.amazonaws.services.ec2.model.CreateVolumeResult;
+import com.amazonaws.services.ec2.model.CreateVpcEndpointRequest;
+import com.amazonaws.services.ec2.model.CreateVpcEndpointResult;
+import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionRequest;
+import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionResult;
+import com.amazonaws.services.ec2.model.CreateVpcRequest;
+import com.amazonaws.services.ec2.model.CreateVpcResult;
+import com.amazonaws.services.ec2.model.CreateVpnConnectionRequest;
+import com.amazonaws.services.ec2.model.CreateVpnConnectionResult;
+import com.amazonaws.services.ec2.model.CreateVpnConnectionRouteRequest;
+import com.amazonaws.services.ec2.model.CreateVpnGatewayRequest;
+import com.amazonaws.services.ec2.model.CreateVpnGatewayResult;
+import com.amazonaws.services.ec2.model.DeleteCustomerGatewayRequest;
+import com.amazonaws.services.ec2.model.DeleteDhcpOptionsRequest;
+import com.amazonaws.services.ec2.model.DeleteFlowLogsRequest;
+import com.amazonaws.services.ec2.model.DeleteFlowLogsResult;
+import com.amazonaws.services.ec2.model.DeleteInternetGatewayRequest;
+import com.amazonaws.services.ec2.model.DeleteKeyPairRequest;
+import com.amazonaws.services.ec2.model.DeleteNetworkAclEntryRequest;
+import com.amazonaws.services.ec2.model.DeleteNetworkAclRequest;
+import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceRequest;
+import com.amazonaws.services.ec2.model.DeletePlacementGroupRequest;
+import com.amazonaws.services.ec2.model.DeleteRouteRequest;
+import com.amazonaws.services.ec2.model.DeleteRouteTableRequest;
+import com.amazonaws.services.ec2.model.DeleteSecurityGroupRequest;
+import com.amazonaws.services.ec2.model.DeleteSnapshotRequest;
+import com.amazonaws.services.ec2.model.DeleteSpotDatafeedSubscriptionRequest;
+import com.amazonaws.services.ec2.model.DeleteSubnetRequest;
+import com.amazonaws.services.ec2.model.DeleteTagsRequest;
+import com.amazonaws.services.ec2.model.DeleteVolumeRequest;
+import com.amazonaws.services.ec2.model.DeleteVpcEndpointsRequest;
+import com.amazonaws.services.ec2.model.DeleteVpcEndpointsResult;
+import com.amazonaws.services.ec2.model.DeleteVpcPeeringConnectionRequest;
+import com.amazonaws.services.ec2.model.DeleteVpcPeeringConnectionResult;
+import com.amazonaws.services.ec2.model.DeleteVpcRequest;
+import com.amazonaws.services.ec2.model.DeleteVpnConnectionRequest;
+import com.amazonaws.services.ec2.model.DeleteVpnConnectionRouteRequest;
+import com.amazonaws.services.ec2.model.DeleteVpnGatewayRequest;
+import com.amazonaws.services.ec2.model.DeregisterImageRequest;
+import com.amazonaws.services.ec2.model.DescribeAccountAttributesRequest;
+import com.amazonaws.services.ec2.model.DescribeAccountAttributesResult;
+import com.amazonaws.services.ec2.model.DescribeAddressesRequest;
+import com.amazonaws.services.ec2.model.DescribeAddressesResult;
+import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesRequest;
+import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesResult;
+import com.amazonaws.services.ec2.model.DescribeBundleTasksRequest;
+import com.amazonaws.services.ec2.model.DescribeBundleTasksResult;
+import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesRequest;
+import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesResult;
+import com.amazonaws.services.ec2.model.DescribeConversionTasksRequest;
+import com.amazonaws.services.ec2.model.DescribeConversionTasksResult;
+import com.amazonaws.services.ec2.model.DescribeCustomerGatewaysRequest;
+import com.amazonaws.services.ec2.model.DescribeCustomerGatewaysResult;
+import com.amazonaws.services.ec2.model.DescribeDhcpOptionsRequest;
+import com.amazonaws.services.ec2.model.DescribeDhcpOptionsResult;
+import com.amazonaws.services.ec2.model.DescribeExportTasksRequest;
+import com.amazonaws.services.ec2.model.DescribeExportTasksResult;
+import com.amazonaws.services.ec2.model.DescribeFlowLogsRequest;
+import com.amazonaws.services.ec2.model.DescribeFlowLogsResult;
+import com.amazonaws.services.ec2.model.DescribeImageAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeImageAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeImagesRequest;
+import com.amazonaws.services.ec2.model.DescribeImagesResult;
+import com.amazonaws.services.ec2.model.DescribeImportImageTasksRequest;
+import com.amazonaws.services.ec2.model.DescribeImportImageTasksResult;
+import com.amazonaws.services.ec2.model.DescribeImportSnapshotTasksRequest;
+import com.amazonaws.services.ec2.model.DescribeImportSnapshotTasksResult;
+import com.amazonaws.services.ec2.model.DescribeInstanceAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeInstanceAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeInstanceStatusRequest;
+import com.amazonaws.services.ec2.model.DescribeInstanceStatusResult;
+import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
+import com.amazonaws.services.ec2.model.DescribeInstancesResult;
+import com.amazonaws.services.ec2.model.DescribeInternetGatewaysRequest;
+import com.amazonaws.services.ec2.model.DescribeInternetGatewaysResult;
+import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest;
+import com.amazonaws.services.ec2.model.DescribeKeyPairsResult;
+import com.amazonaws.services.ec2.model.DescribeMovingAddressesRequest;
+import com.amazonaws.services.ec2.model.DescribeMovingAddressesResult;
+import com.amazonaws.services.ec2.model.DescribeNetworkAclsRequest;
+import com.amazonaws.services.ec2.model.DescribeNetworkAclsResult;
+import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesRequest;
+import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesResult;
+import com.amazonaws.services.ec2.model.DescribePlacementGroupsRequest;
+import com.amazonaws.services.ec2.model.DescribePlacementGroupsResult;
+import com.amazonaws.services.ec2.model.DescribePrefixListsRequest;
+import com.amazonaws.services.ec2.model.DescribePrefixListsResult;
+import com.amazonaws.services.ec2.model.DescribeRegionsRequest;
+import com.amazonaws.services.ec2.model.DescribeRegionsResult;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesListingsRequest;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesListingsResult;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesModificationsRequest;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesModificationsResult;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesOfferingsRequest;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesOfferingsResult;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesRequest;
+import com.amazonaws.services.ec2.model.DescribeReservedInstancesResult;
+import com.amazonaws.services.ec2.model.DescribeRouteTablesRequest;
+import com.amazonaws.services.ec2.model.DescribeRouteTablesResult;
+import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest;
+import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult;
+import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeSnapshotsRequest;
+import com.amazonaws.services.ec2.model.DescribeSnapshotsResult;
+import com.amazonaws.services.ec2.model.DescribeSpotDatafeedSubscriptionRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotDatafeedSubscriptionResult;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetInstancesRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetInstancesResult;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestHistoryRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestHistoryResult;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestsRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestsResult;
+import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsResult;
+import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryRequest;
+import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryResult;
+import com.amazonaws.services.ec2.model.DescribeSubnetsRequest;
+import com.amazonaws.services.ec2.model.DescribeSubnetsResult;
+import com.amazonaws.services.ec2.model.DescribeTagsRequest;
+import com.amazonaws.services.ec2.model.DescribeTagsResult;
+import com.amazonaws.services.ec2.model.DescribeVolumeAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeVolumeAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeVolumeStatusRequest;
+import com.amazonaws.services.ec2.model.DescribeVolumeStatusResult;
+import com.amazonaws.services.ec2.model.DescribeVolumesRequest;
+import com.amazonaws.services.ec2.model.DescribeVolumesResult;
+import com.amazonaws.services.ec2.model.DescribeVpcAttributeRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcAttributeResult;
+import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkResult;
+import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesResult;
+import com.amazonaws.services.ec2.model.DescribeVpcEndpointsRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcEndpointsResult;
+import com.amazonaws.services.ec2.model.DescribeVpcPeeringConnectionsRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcPeeringConnectionsResult;
+import com.amazonaws.services.ec2.model.DescribeVpcsRequest;
+import com.amazonaws.services.ec2.model.DescribeVpcsResult;
+import com.amazonaws.services.ec2.model.DescribeVpnConnectionsRequest;
+import com.amazonaws.services.ec2.model.DescribeVpnConnectionsResult;
+import com.amazonaws.services.ec2.model.DescribeVpnGatewaysRequest;
+import com.amazonaws.services.ec2.model.DescribeVpnGatewaysResult;
+import com.amazonaws.services.ec2.model.DetachClassicLinkVpcRequest;
+import com.amazonaws.services.ec2.model.DetachClassicLinkVpcResult;
+import com.amazonaws.services.ec2.model.DetachInternetGatewayRequest;
+import com.amazonaws.services.ec2.model.DetachNetworkInterfaceRequest;
+import com.amazonaws.services.ec2.model.DetachVolumeRequest;
+import com.amazonaws.services.ec2.model.DetachVolumeResult;
+import com.amazonaws.services.ec2.model.DetachVpnGatewayRequest;
+import com.amazonaws.services.ec2.model.DisableVgwRoutePropagationRequest;
+import com.amazonaws.services.ec2.model.DisableVpcClassicLinkRequest;
+import com.amazonaws.services.ec2.model.DisableVpcClassicLinkResult;
+import com.amazonaws.services.ec2.model.DisassociateAddressRequest;
+import com.amazonaws.services.ec2.model.DisassociateRouteTableRequest;
+import com.amazonaws.services.ec2.model.DryRunResult;
+import com.amazonaws.services.ec2.model.DryRunSupportedRequest;
+import com.amazonaws.services.ec2.model.EnableVgwRoutePropagationRequest;
+import com.amazonaws.services.ec2.model.EnableVolumeIORequest;
+import com.amazonaws.services.ec2.model.EnableVpcClassicLinkRequest;
+import com.amazonaws.services.ec2.model.EnableVpcClassicLinkResult;
+import com.amazonaws.services.ec2.model.Filter;
+import com.amazonaws.services.ec2.model.GetConsoleOutputRequest;
+import com.amazonaws.services.ec2.model.GetConsoleOutputResult;
+import com.amazonaws.services.ec2.model.GetPasswordDataRequest;
+import com.amazonaws.services.ec2.model.GetPasswordDataResult;
+import com.amazonaws.services.ec2.model.ImportImageRequest;
+import com.amazonaws.services.ec2.model.ImportImageResult;
+import com.amazonaws.services.ec2.model.ImportInstanceRequest;
+import com.amazonaws.services.ec2.model.ImportInstanceResult;
+import com.amazonaws.services.ec2.model.ImportKeyPairRequest;
+import com.amazonaws.services.ec2.model.ImportKeyPairResult;
+import com.amazonaws.services.ec2.model.ImportSnapshotRequest;
+import com.amazonaws.services.ec2.model.ImportSnapshotResult;
+import com.amazonaws.services.ec2.model.ImportVolumeRequest;
+import com.amazonaws.services.ec2.model.ImportVolumeResult;
+import com.amazonaws.services.ec2.model.Instance;
+import com.amazonaws.services.ec2.model.InstanceState;
+import com.amazonaws.services.ec2.model.InstanceStateName;
+import com.amazonaws.services.ec2.model.ModifyImageAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyInstanceAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyNetworkInterfaceAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyReservedInstancesRequest;
+import com.amazonaws.services.ec2.model.ModifyReservedInstancesResult;
+import com.amazonaws.services.ec2.model.ModifySnapshotAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifySpotFleetRequestRequest;
+import com.amazonaws.services.ec2.model.ModifySpotFleetRequestResult;
+import com.amazonaws.services.ec2.model.ModifySubnetAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyVolumeAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyVpcAttributeRequest;
+import com.amazonaws.services.ec2.model.ModifyVpcEndpointRequest;
+import com.amazonaws.services.ec2.model.ModifyVpcEndpointResult;
+import com.amazonaws.services.ec2.model.MonitorInstancesRequest;
+import com.amazonaws.services.ec2.model.MonitorInstancesResult;
+import com.amazonaws.services.ec2.model.MoveAddressToVpcRequest;
+import com.amazonaws.services.ec2.model.MoveAddressToVpcResult;
+import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingRequest;
+import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingResult;
+import com.amazonaws.services.ec2.model.RebootInstancesRequest;
+import com.amazonaws.services.ec2.model.RegisterImageRequest;
+import com.amazonaws.services.ec2.model.RegisterImageResult;
+import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionRequest;
+import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionResult;
+import com.amazonaws.services.ec2.model.ReleaseAddressRequest;
+import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationRequest;
+import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationResult;
+import com.amazonaws.services.ec2.model.ReplaceNetworkAclEntryRequest;
+import com.amazonaws.services.ec2.model.ReplaceRouteRequest;
+import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationRequest;
+import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationResult;
+import com.amazonaws.services.ec2.model.ReportInstanceStatusRequest;
+import com.amazonaws.services.ec2.model.RequestSpotFleetRequest;
+import com.amazonaws.services.ec2.model.RequestSpotFleetResult;
+import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest;
+import com.amazonaws.services.ec2.model.RequestSpotInstancesResult;
+import com.amazonaws.services.ec2.model.Reservation;
+import com.amazonaws.services.ec2.model.ResetImageAttributeRequest;
+import com.amazonaws.services.ec2.model.ResetInstanceAttributeRequest;
+import com.amazonaws.services.ec2.model.ResetNetworkInterfaceAttributeRequest;
+import com.amazonaws.services.ec2.model.ResetSnapshotAttributeRequest;
+import com.amazonaws.services.ec2.model.RestoreAddressToClassicRequest;
+import com.amazonaws.services.ec2.model.RestoreAddressToClassicResult;
+import com.amazonaws.services.ec2.model.RevokeSecurityGroupEgressRequest;
+import com.amazonaws.services.ec2.model.RevokeSecurityGroupIngressRequest;
+import com.amazonaws.services.ec2.model.RunInstancesRequest;
+import com.amazonaws.services.ec2.model.RunInstancesResult;
+import com.amazonaws.services.ec2.model.StartInstancesRequest;
+import com.amazonaws.services.ec2.model.StartInstancesResult;
+import com.amazonaws.services.ec2.model.StopInstancesRequest;
+import com.amazonaws.services.ec2.model.StopInstancesResult;
+import com.amazonaws.services.ec2.model.Tag;
+import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
+import com.amazonaws.services.ec2.model.TerminateInstancesResult;
+import com.amazonaws.services.ec2.model.UnassignPrivateIpAddressesRequest;
+import com.amazonaws.services.ec2.model.UnmonitorInstancesRequest;
+import com.amazonaws.services.ec2.model.UnmonitorInstancesResult;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -508,12 +826,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeVolumesResult describeVolumes(DescribeVolumesRequest describeVolumesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesListingsResult describeReservedInstancesListings(DescribeReservedInstancesListingsRequest describeReservedInstancesListingsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -523,47 +841,47 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeRouteTablesResult describeRouteTables(DescribeRouteTablesRequest describeRouteTablesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
    }

     @Override
     public DescribeDhcpOptionsResult describeDhcpOptions(DescribeDhcpOptionsRequest describeDhcpOptionsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public MonitorInstancesResult monitorInstances(MonitorInstancesRequest monitorInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribePrefixListsResult describePrefixLists(DescribePrefixListsRequest describePrefixListsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public RequestSpotFleetResult requestSpotFleet(RequestSpotFleetRequest requestSpotFleetRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeImportImageTasksResult describeImportImageTasks(DescribeImportImageTasksRequest describeImportImageTasksRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeNetworkAclsResult describeNetworkAcls(DescribeNetworkAclsRequest describeNetworkAclsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeBundleTasksResult describeBundleTasks(DescribeBundleTasksRequest describeBundleTasksRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ImportInstanceResult importInstance(ImportInstanceRequest importInstanceRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -573,17 +891,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DeleteVpcPeeringConnectionResult deleteVpcPeeringConnection(DeleteVpcPeeringConnectionRequest deleteVpcPeeringConnectionRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public GetConsoleOutputResult getConsoleOutput(GetConsoleOutputRequest getConsoleOutputRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateInternetGatewayResult createInternetGateway(CreateInternetGatewayRequest createInternetGatewayRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -603,12 +921,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CreateCustomerGatewayResult createCustomerGateway(CreateCustomerGatewayRequest createCustomerGatewayRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateSpotDatafeedSubscriptionResult createSpotDatafeedSubscription(CreateSpotDatafeedSubscriptionRequest createSpotDatafeedSubscriptionRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -623,62 +941,62 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeMovingAddressesResult describeMovingAddresses(DescribeMovingAddressesRequest describeMovingAddressesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeConversionTasksResult describeConversionTasks(DescribeConversionTasksRequest describeConversionTasksRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateVpnConnectionResult createVpnConnection(CreateVpnConnectionRequest createVpnConnectionRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ImportImageResult importImage(ImportImageRequest importImageRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DisableVpcClassicLinkResult disableVpcClassicLink(DisableVpcClassicLinkRequest disableVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeInstanceAttributeResult describeInstanceAttribute(DescribeInstanceAttributeRequest describeInstanceAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeFlowLogsResult describeFlowLogs(DescribeFlowLogsRequest describeFlowLogsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections(DescribeVpcPeeringConnectionsRequest describeVpcPeeringConnectionsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribePlacementGroupsResult describePlacementGroups(DescribePlacementGroupsRequest describePlacementGroupsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public RunInstancesResult runInstances(RunInstancesRequest runInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSubnetsResult describeSubnets(DescribeSubnetsRequest describeSubnetsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public AssociateRouteTableResult associateRouteTable(AssociateRouteTableRequest associateRouteTableRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -693,12 +1011,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeImagesResult describeImages(DescribeImagesRequest describeImagesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public StartInstancesResult startInstances(StartInstancesRequest startInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -708,7 +1026,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CancelReservedInstancesListingResult cancelReservedInstancesListing(CancelReservedInstancesListingRequest cancelReservedInstancesListingRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -723,17 +1041,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests(DescribeSpotInstanceRequestsRequest describeSpotInstanceRequestsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateVpcResult createVpc(CreateVpcRequest createVpcRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeCustomerGatewaysResult describeCustomerGateways(DescribeCustomerGatewaysRequest describeCustomerGatewaysRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -743,22 +1061,22 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CreateRouteResult createRoute(CreateRouteRequest createRouteRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateVpcEndpointResult createVpcEndpoint(CreateVpcEndpointRequest createVpcEndpointRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CopyImageResult copyImage(CopyImageRequest copyImageRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcClassicLinkResult describeVpcClassicLink(DescribeVpcClassicLinkRequest describeVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -773,17 +1091,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeNetworkInterfaceAttributeResult describeNetworkInterfaceAttribute(DescribeNetworkInterfaceAttributeRequest describeNetworkInterfaceAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeClassicLinkInstancesResult describeClassicLinkInstances(DescribeClassicLinkInstancesRequest describeClassicLinkInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public RequestSpotInstancesResult requestSpotInstances(RequestSpotInstancesRequest requestSpotInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -793,12 +1111,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeVolumeAttributeResult describeVolumeAttribute(DescribeVolumeAttributeRequest describeVolumeAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public AttachNetworkInterfaceResult attachNetworkInterface(AttachNetworkInterfaceRequest attachNetworkInterfaceRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -808,12 +1126,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeTagsResult describeTags(DescribeTagsRequest describeTagsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CancelBundleTaskResult cancelBundleTask(CancelBundleTaskRequest cancelBundleTaskRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -823,22 +1141,22 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public ImportSnapshotResult importSnapshot(ImportSnapshotRequest importSnapshotRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CancelSpotInstanceRequestsResult cancelSpotInstanceRequests(CancelSpotInstanceRequestsRequest cancelSpotInstanceRequestsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSpotFleetRequestsResult describeSpotFleetRequests(DescribeSpotFleetRequestsRequest describeSpotFleetRequestsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public PurchaseReservedInstancesOfferingResult purchaseReservedInstancesOffering(PurchaseReservedInstancesOfferingRequest purchaseReservedInstancesOfferingRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -848,17 +1166,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications(DescribeReservedInstancesModificationsRequest describeReservedInstancesModificationsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public TerminateInstancesResult terminateInstances(TerminateInstancesRequest terminateInstancesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ModifyVpcEndpointResult modifyVpcEndpoint(ModifyVpcEndpointRequest modifyVpcEndpointRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -873,32 +1191,32 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeSnapshotAttributeResult describeSnapshotAttribute(DescribeSnapshotAttributeRequest describeSnapshotAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ReplaceRouteTableAssociationResult replaceRouteTableAssociation(ReplaceRouteTableAssociationRequest replaceRouteTableAssociationRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeAddressesResult describeAddresses(DescribeAddressesRequest describeAddressesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeImageAttributeResult describeImageAttribute(DescribeImageAttributeRequest describeImageAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeKeyPairsResult describeKeyPairs(DescribeKeyPairsRequest describeKeyPairsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ConfirmProductInstanceResult confirmProductInstance(ConfirmProductInstanceRequest confirmProductInstanceRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -908,7 +1226,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeVpcAttributeResult describeVpcAttribute(DescribeVpcAttributeRequest describeVpcAttributeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -923,27 +1241,27 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CreateVolumeResult createVolume(CreateVolumeRequest createVolumeRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeInstanceStatusResult describeInstanceStatus(DescribeInstanceStatusRequest describeInstanceStatusRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpnGatewaysResult describeVpnGateways(DescribeVpnGatewaysRequest describeVpnGatewaysRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateSubnetResult createSubnet(CreateSubnetRequest createSubnetRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings(DescribeReservedInstancesOfferingsRequest describeReservedInstancesOfferingsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -953,7 +1271,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeSpotFleetRequestHistoryResult describeSpotFleetRequestHistory(DescribeSpotFleetRequestHistoryRequest describeSpotFleetRequestHistoryRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -963,7 +1281,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public ReplaceNetworkAclAssociationResult replaceNetworkAclAssociation(ReplaceNetworkAclAssociationRequest replaceNetworkAclAssociationRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -978,7 +1296,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public BundleInstanceResult bundleInstance(BundleInstanceRequest bundleInstanceRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -998,17 +1316,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CopySnapshotResult copySnapshot(CopySnapshotRequest copySnapshotRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcEndpointServicesResult describeVpcEndpointServices(DescribeVpcEndpointServicesRequest describeVpcEndpointServicesRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public AllocateAddressResult allocateAddress(AllocateAddressRequest allocateAddressRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1023,7 +1341,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CreateKeyPairResult createKeyPair(CreateKeyPairRequest createKeyPairRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1033,17 +1351,17 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeSnapshotsResult describeSnapshots(DescribeSnapshotsRequest describeSnapshotsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateNetworkAclResult createNetworkAcl(CreateNetworkAclRequest createNetworkAclRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public RegisterImageResult registerImage(RegisterImageRequest registerImageRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1053,7 +1371,7 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public EnableVpcClassicLinkResult enableVpcClassicLink(EnableVpcClassicLinkRequest enableVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1063,137 +1381,137 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeVpcEndpointsResult describeVpcEndpoints(DescribeVpcEndpointsRequest describeVpcEndpointsRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DetachClassicLinkVpcResult detachClassicLinkVpc(DetachClassicLinkVpcRequest detachClassicLinkVpcRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesResult describeReservedInstances() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeAvailabilityZonesResult describeAvailabilityZones() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSpotPriceHistoryResult describeSpotPriceHistory() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeNetworkInterfacesResult describeNetworkInterfaces() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeRegionsResult describeRegions() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeInternetGatewaysResult describeInternetGateways() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSecurityGroupsResult describeSecurityGroups() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeAccountAttributesResult describeAccountAttributes() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVolumeStatusResult describeVolumeStatus() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeImportSnapshotTasksResult describeImportSnapshotTasks() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpnConnectionsResult describeVpnConnections() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcsResult describeVpcs() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeExportTasksResult describeExportTasks() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CreateVpcPeeringConnectionResult createVpcPeeringConnection() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public CancelImportTaskResult cancelImportTask() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVolumesResult describeVolumes() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesListingsResult describeReservedInstancesListings() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeRouteTablesResult describeRouteTables() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeDhcpOptionsResult describeDhcpOptions() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribePrefixListsResult describePrefixLists() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeImportImageTasksResult describeImportImageTasks() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeNetworkAclsResult describeNetworkAcls() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeBundleTasksResult describeBundleTasks() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1203,92 +1521,92 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public CreateInternetGatewayResult createInternetGateway() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeMovingAddressesResult describeMovingAddresses() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeConversionTasksResult describeConversionTasks() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ImportImageResult importImage() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeFlowLogsResult describeFlowLogs() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribePlacementGroupsResult describePlacementGroups() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSubnetsResult describeSubnets() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeInstancesResult describeInstances() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeImagesResult describeImages() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeCustomerGatewaysResult describeCustomerGateways() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcClassicLinkResult describeVpcClassicLink() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeClassicLinkInstancesResult describeClassicLinkInstances() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeTagsResult describeTags() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ImportSnapshotResult importSnapshot() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSpotFleetRequestsResult describeSpotFleetRequests() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1298,52 +1616,52 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public DescribeAddressesResult describeAddresses() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeKeyPairsResult describeKeyPairs() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeInstanceStatusResult describeInstanceStatus() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpnGatewaysResult describeVpnGateways() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcEndpointServicesResult describeVpcEndpointServices() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public AllocateAddressResult allocateAddress() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeSnapshotsResult describeSnapshots() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DescribeVpcEndpointsResult describeVpcEndpoints() throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public DryRunResult dryRun(DryRunSupportedRequest request) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
@@ -1353,11 +1671,11 @@ public class AmazonEC2Mock implements AmazonEC2 {
     @Override
     public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }

     @Override
     public ModifySpotFleetRequestResult modifySpotFleetRequest(ModifySpotFleetRequestRequest modifySpotFleetRequestRequest) throws AmazonServiceException, AmazonClientException {
-        throw new UnsupportedOperationException("Not supported in mock");
+        throw new UnsupportedOperationException("Not supported in mock");
     }
 }
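[Editor's note] The mock above is almost entirely boilerplate: every AmazonEC2 operation the tests never exercise fails fast with UnsupportedOperationException instead of silently returning null, so an accidental dependency on an unstubbed call surfaces immediately. A minimal, self-contained sketch of the same test-double pattern follows; the ComputeService interface and all names in it are hypothetical stand-ins, not part of this patch:

```java
import java.util.ArrayList;
import java.util.List;

/** Hypothetical service interface standing in for a large SDK client like AmazonEC2. */
interface ComputeService {
    List<String> describeInstances(String filter);
    void terminateInstance(String instanceId);
}

/** Test double: implement only what the test needs, fail loudly on everything else. */
class ComputeServiceMock implements ComputeService {
    private final List<String> instances = new ArrayList<>();

    ComputeServiceMock(List<String> seeded) {
        instances.addAll(seeded);
    }

    @Override
    public List<String> describeInstances(String filter) {
        // The one operation the test exercises gets a real (canned) implementation.
        List<String> matches = new ArrayList<>();
        for (String id : instances) {
            if (id.contains(filter)) {
                matches.add(id);
            }
        }
        return matches;
    }

    @Override
    public void terminateInstance(String instanceId) {
        // Anything the test should never touch throws, mirroring the EC2 mock above.
        throw new UnsupportedOperationException("Not supported in mock");
    }
}
```

Throwing rather than returning a default keeps the mock honest: if production code under test starts calling a new operation, the test fails at the call site instead of propagating a null deep into an assertion.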
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index 6f88be2be5a..bea0df9e8d8 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.discovery.ec2;
 
 import com.amazonaws.services.ec2.model.Tag;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.cloud.aws.AwsEc2Service;
 import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2;
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
index 07e05f06c6d..76172172bb8 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
@@ -30,9 +30,8 @@ import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.compute.Compute;
 import com.google.api.services.compute.model.Instance;
 import com.google.api.services.compute.model.InstanceList;
-
-import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.cloud.gce.network.GceNameResolver;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
@@ -48,7 +47,11 @@ import java.security.GeneralSecurityException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 
 public class GceComputeServiceImpl extends AbstractLifecycleComponent implements GceComputeService {
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
index 476773dcc73..8ea93825bd1 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
@@ -22,7 +22,6 @@ package org.elasticsearch.discovery.gce;
 import com.google.api.services.compute.model.AccessConfig;
 import com.google.api.services.compute.model.Instance;
 import com.google.api.services.compute.model.NetworkInterface;
-
 import org.elasticsearch.Version;
 import org.elasticsearch.cloud.gce.GceComputeService;
 import org.elasticsearch.cluster.node.DiscoveryNode;
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
index 1d73e1d540e..22d759fc2dd 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
@@ -21,10 +21,14 @@ package org.elasticsearch.discovery.gce;
 
 import com.google.api.client.auth.oauth2.Credential;
 import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential;
-import com.google.api.client.http.*;
+import com.google.api.client.http.HttpBackOffIOExceptionHandler;
+import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
+import com.google.api.client.http.HttpRequest;
+import com.google.api.client.http.HttpRequestInitializer;
+import com.google.api.client.http.HttpResponse;
+import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
 import com.google.api.client.util.ExponentialBackOff;
 import com.google.api.client.util.Sleeper;
-
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
@@ -65,7 +69,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
         this.sleeper = sleeper;
         this.maxWait = maxWait;
     }
-    
+
     // Use only for testing
     static MockGoogleCredential.Builder newMockCredentialBuilder() {
         // TODO: figure out why GCE is so bad like this
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
index 5f01a98a5f2..5bb5e27ce64 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
@@ -21,7 +21,6 @@ package org.elasticsearch.plugin.discovery.gce;
 
 import com.google.api.client.http.HttpHeaders;
 import com.google.api.client.util.ClassInfo;
-
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.cloud.gce.GceComputeService;
 import org.elasticsearch.cloud.gce.GceModule;
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
index ef92bd74305..9e48bc7d3df 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
@@ -34,7 +34,6 @@ import com.google.api.client.testing.http.MockLowLevelHttpRequest;
 import com.google.api.client.testing.http.MockLowLevelHttpResponse;
 import com.google.api.client.testing.util.MockSleeper;
 import com.google.api.services.compute.Compute;
-
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
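[Editor's note] RetryHttpInitializerWrapper, whose imports are cleaned up above, retries failed HTTP calls with exponential backoff and takes an injectable Sleeper so tests (see MockSleeper in the test file) can fake time instead of really waiting. A rough, self-contained sketch of that retry shape, deliberately without the Google HTTP client types; every name here is illustrative, not the actual API:

```java
import java.util.concurrent.Callable;

/** Injectable sleeper so tests can record delays instead of sleeping, like MockSleeper. */
interface Sleeper {
    void sleep(long millis) throws InterruptedException;
}

class RetryingCaller {
    private final Sleeper sleeper;
    private final long maxWaitMillis;

    RetryingCaller(Sleeper sleeper, long maxWaitMillis) {
        this.sleeper = sleeper;
        this.maxWaitMillis = maxWaitMillis;
    }

    <T> T call(Callable<T> operation) throws Exception {
        long backoff = 100; // initial backoff between attempts
        long waited = 0;    // total time spent backing off so far
        while (true) {
            try {
                return operation.call();
            } catch (Exception e) {
                if (waited + backoff > maxWaitMillis) {
                    throw e; // retry budget exhausted: surface the failure
                }
                sleeper.sleep(backoff);
                waited += backoff;
                backoff *= 2; // exponential growth, as with ExponentialBackOff
            }
        }
    }
}
```

In a test, a Sleeper that merely records the requested delays verifies the backoff schedule without slowing the suite down, which is exactly what the MockSleeper import in RetryHttpInitializerWrapperTests is for.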
a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java index f0a734372fd..0d3c945ee2f 100644 --- a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java +++ b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java @@ -19,14 +19,10 @@ package org.elasticsearch.plugin.discovery.multicast; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.plugin.discovery.multicast.MulticastZenPing; import org.elasticsearch.plugins.Plugin; -import java.util.Collection; - public class MulticastDiscoveryPlugin extends Plugin { private final Settings settings; @@ -44,7 +40,7 @@ public class MulticastDiscoveryPlugin extends Plugin { public String description() { return "Multicast Discovery Plugin"; } - + public void onModule(DiscoveryModule module) { if (settings.getAsBoolean("discovery.zen.ping.multicast.enabled", false)) { module.addZenPing(MulticastZenPing.class); diff --git a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java index f28bc08e9a6..82bf1bf088c 100644 --- a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java +++ b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java @@ -20,7 +20,6 @@ package org.elasticsearch.plugin.discovery.multicast; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.Constants; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.SpecialPermission; @@ -149,8 +148,8 @@ public class MulticastZenPing extends AbstractLifecycleComponent implem // may not even be bound to an interface on this machine! use the first bound address. List addresses = Arrays.asList(networkService.resolveBindHostAddresses(address == null ? 
null : new String[] { address })); NetworkUtils.sortAddresses(addresses); - - final MulticastChannel.Config config = new MulticastChannel.Config(port, group, bufferSize, ttl, + + final MulticastChannel.Config config = new MulticastChannel.Config(port, group, bufferSize, ttl, addresses.get(0), deferToInterface); SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java index 9d1c36a90dd..d5e0a62ecb5 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java @@ -22,7 +22,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.cat.AbstractCatAction; import org.elasticsearch.rest.action.support.RestTable; diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java index 9dd9cb740ed..c1bcc65bfe2 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java @@ -28,7 +28,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.rest.action.cat.AbstractCatAction; -import java.io.Closeable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 825a8d358d9..eca1265766d 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -27,14 +27,27 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.*; +import org.elasticsearch.script.ClassPermission; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.ScoreAccessor; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.javascript.support.NativeList; import org.elasticsearch.script.javascript.support.NativeMap; import org.elasticsearch.script.javascript.support.ScriptValueConverter; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; -import org.mozilla.javascript.*; +import org.mozilla.javascript.Context; +import 
org.mozilla.javascript.ContextFactory; +import org.mozilla.javascript.GeneratedClassLoader; +import org.mozilla.javascript.PolicySecurityController; import org.mozilla.javascript.Script; +import org.mozilla.javascript.Scriptable; +import org.mozilla.javascript.ScriptableObject; +import org.mozilla.javascript.SecurityController; +import org.mozilla.javascript.WrapFactory; import java.io.IOException; import java.net.MalformedURLException; diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java index f3a39896641..a90948c1877 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java @@ -19,9 +19,19 @@ package org.elasticsearch.script.javascript.support; -import org.mozilla.javascript.*; +import org.mozilla.javascript.Context; +import org.mozilla.javascript.IdScriptableObject; +import org.mozilla.javascript.NativeArray; +import org.mozilla.javascript.ScriptRuntime; +import org.mozilla.javascript.Scriptable; +import org.mozilla.javascript.Wrapper; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Value Converter to marshal objects between Java and Javascript. diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java index baa06f45ff8..9788e63c3d7 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java @@ -19,14 +19,16 @@ package org.elasticsearch.plan.a; -import java.util.HashMap; -import java.util.Map; - import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; -import static org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; class Adapter { static class StatementMetadata { @@ -226,7 +228,7 @@ class Adapter { return sourceemd; } - + ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { final ExpressionMetadata sourceemd = expressionMetadata.get(source); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java index a7e2986d633..a20c32965b2 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java @@ -19,6 +19,8 @@ package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.ParserRuleContext; + import java.util.ArrayDeque; import java.util.Arrays; import java.util.Deque; @@ -26,11 +28,81 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.antlr.v4.runtime.ParserRuleContext; - -import static org.elasticsearch.plan.a.Adapter.*; -import static 
org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; +import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; +import static org.elasticsearch.plan.a.Adapter.StatementMetadata; +import static org.elasticsearch.plan.a.Adapter.error; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Constructor; +import static org.elasticsearch.plan.a.Definition.Field; +import static org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Pair; +import static org.elasticsearch.plan.a.Definition.Sort; +import static org.elasticsearch.plan.a.Definition.Struct; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ADD; +import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import static org.elasticsearch.plan.a.PlanAParser.BWAND; +import static org.elasticsearch.plan.a.PlanAParser.BWOR; +import static org.elasticsearch.plan.a.PlanAParser.BWXOR; +import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import static org.elasticsearch.plan.a.PlanAParser.BlockContext; +import static org.elasticsearch.plan.a.PlanAParser.BoolContext; +import static org.elasticsearch.plan.a.PlanAParser.BreakContext; +import static org.elasticsearch.plan.a.PlanAParser.CastContext; +import static org.elasticsearch.plan.a.PlanAParser.CharContext; +import static org.elasticsearch.plan.a.PlanAParser.CompContext; +import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; +import static org.elasticsearch.plan.a.PlanAParser.DIV; +import static org.elasticsearch.plan.a.PlanAParser.DeclContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import static org.elasticsearch.plan.a.PlanAParser.DoContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.ExprContext; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import static org.elasticsearch.plan.a.PlanAParser.FalseContext; +import static org.elasticsearch.plan.a.PlanAParser.ForContext; +import static 
org.elasticsearch.plan.a.PlanAParser.IfContext; +import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import static org.elasticsearch.plan.a.PlanAParser.LSH; +import static org.elasticsearch.plan.a.PlanAParser.MUL; +import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import static org.elasticsearch.plan.a.PlanAParser.NullContext; +import static org.elasticsearch.plan.a.PlanAParser.NumericContext; +import static org.elasticsearch.plan.a.PlanAParser.PostincContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import static org.elasticsearch.plan.a.PlanAParser.PreincContext; +import static org.elasticsearch.plan.a.PlanAParser.REM; +import static org.elasticsearch.plan.a.PlanAParser.RSH; +import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import static org.elasticsearch.plan.a.PlanAParser.SUB; +import static org.elasticsearch.plan.a.PlanAParser.SingleContext; +import static org.elasticsearch.plan.a.PlanAParser.SourceContext; +import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.USH; +import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Analyzer extends PlanAParserBaseVisitor { private static class Variable { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java index 6f4a23765b5..4d6936a0d72 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java @@ -19,17 +19,17 @@ package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.bootstrap.BootstrapInfo; + import java.net.MalformedURLException; import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; -import org.antlr.v4.runtime.ANTLRInputStream; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.ParserRuleContext; -import org.elasticsearch.bootstrap.BootstrapInfo; - final class Compiler { private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java index 2a1eb13408c..bd9b146e41e 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java @@ -24,7 +24,12 @@ import java.lang.reflect.Array; import java.util.List; import java.util.Map; -import static org.elasticsearch.plan.a.Definition.*; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Field; +import static org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Struct; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; public class Def { public static Object methodCall(final Object owner, final String name, final Definition definition, @@ -551,7 +556,7 @@ public 
class Def { throw new ClassCastException("Cannot apply [%] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + public static Object add(final Object left, final Object right) { if (left instanceof String || right instanceof String) { return "" + left + right; @@ -738,7 +743,7 @@ public class Def { throw new ClassCastException("Cannot apply [>>>] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + public static Object and(final Object left, final Object right) { if (left instanceof Boolean && right instanceof Boolean) { return (boolean)left && (boolean)right; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java index 95e3c93a354..4963815f470 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java @@ -19,12 +19,12 @@ package org.elasticsearch.plan.a; * under the License. */ -import java.text.ParseException; - import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.misc.Interval; +import java.text.ParseException; + class ErrorHandlingLexer extends PlanALexer { public ErrorHandlingLexer(CharStream charStream) { super(charStream); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java index 3fe36034792..5032ae3222a 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java @@ -19,8 +19,6 @@ package org.elasticsearch.plan.a; * under the License. 
*/ -import java.text.ParseException; - import org.antlr.v4.runtime.DefaultErrorStrategy; import org.antlr.v4.runtime.InputMismatchException; import org.antlr.v4.runtime.NoViableAltException; @@ -28,6 +26,8 @@ import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Token; +import java.text.ParseException; + class ParserErrorStrategy extends DefaultErrorStrategy { @Override public void recover(Parser recognizer, RecognitionException re) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java index a9e5ff623bf..e35df0102d2 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java @@ -1,16 +1,19 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.plan.a; - import java.util.Set; - -import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.Lexer; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.LexerATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.*; + +import java.util.Set; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class PlanALexer extends Lexer { @@ -20,15 +23,15 @@ class PlanALexer extends Lexer { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, - MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, - LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, - BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, - AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, - ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, - STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int EXT = 1; public static String[] modeNames = { @@ -36,36 +39,36 @@ class PlanALexer extends Lexer { }; public static final 
String[] ruleNames = { - "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", + "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "GENERIC", "ID", "EXTINTEGER", "EXTID" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", - "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", - "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", - "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", - "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", 
"AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java index 13f61acb495..da9943385c0 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java @@ -1,13 +1,25 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.plan.a; -import org.antlr.v4.runtime.atn.*; + +import org.antlr.v4.runtime.FailedPredicateException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.Parser; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.ParserATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.misc.*; -import org.antlr.v4.runtime.tree.*; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; + import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class PlanAParser extends Parser { @@ -17,49 +29,49 @@ class PlanAParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, - MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, - LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, - BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, - AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, - ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, - STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int - RULE_source = 0, RULE_statement = 1, 
RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, - RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, - RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, - RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, - RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, + RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, + RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, + RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, + RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, + RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, RULE_arguments = 21, RULE_increment = 22; public static final String[] ruleNames = { - "source", "statement", "block", "empty", "initializer", "afterthought", - "declaration", "decltype", "declvar", "expression", "extstart", "extprec", - "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", + "source", "statement", "block", "empty", "initializer", "afterthought", + "declaration", "decltype", "declvar", "expression", "extstart", "extprec", + "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", "extnew", "extstring", "arguments", "increment" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", - "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", - "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", - "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", - "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", 
"EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -137,7 +149,7 @@ class PlanAParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(47); + setState(47); _errHandler.sync(this); _la = _input.LA(1); do { @@ -147,7 +159,7 @@ class PlanAParser extends Parser { statement(); } } - setState(49); + setState(49); _errHandler.sync(this); _la = _input.LA(1); } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); @@ -171,7 +183,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_statement; } - + public StatementContext() { } public void copyFrom(StatementContext ctx) { super.copyFrom(ctx); @@ -660,7 +672,7 @@ class PlanAParser extends Parser { match(TRY); setState(115); block(); - setState(123); + setState(123); _errHandler.sync(this); _alt = 1; do { @@ -688,7 +700,7 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(125); + setState(125); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,12,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -748,7 +760,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_block; } - + public BlockContext() { } public void copyFrom(BlockContext ctx) { super.copyFrom(ctx); @@ -1163,7 +1175,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_expression; } - + public ExpressionContext() { } public void copyFrom(ExpressionContext ctx) { super.copyFrom(ctx); @@ -1742,7 +1754,7 @@ class PlanAParser extends Parser { } break; } - } + } } setState(249); _errHandler.sync(this); @@ -2476,7 +2488,7 @@ class PlanAParser extends Parser { case LBRACE: { { - setState(325); + setState(325); _errHandler.sync(this); _alt = 1; do { @@ -2496,7 +2508,7 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(327); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java index 7795f74700b..69736f311e6 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ 
b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -28,7 +28,6 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java index 3756e02f8dc..4f3361576c4 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java @@ -34,9 +34,79 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.plan.a.Adapter.*; -import static org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; +import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; +import static org.elasticsearch.plan.a.Adapter.StatementMetadata; +import static org.elasticsearch.plan.a.Adapter.error; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Constructor; +import static org.elasticsearch.plan.a.Definition.Field; +import static org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Sort; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ADD; +import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import static org.elasticsearch.plan.a.PlanAParser.BWAND; +import static org.elasticsearch.plan.a.PlanAParser.BWOR; +import static org.elasticsearch.plan.a.PlanAParser.BWXOR; +import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import static org.elasticsearch.plan.a.PlanAParser.BlockContext; +import static org.elasticsearch.plan.a.PlanAParser.BoolContext; +import static org.elasticsearch.plan.a.PlanAParser.BreakContext; +import static org.elasticsearch.plan.a.PlanAParser.CastContext; +import static org.elasticsearch.plan.a.PlanAParser.CharContext; +import static org.elasticsearch.plan.a.PlanAParser.CompContext; +import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; +import static org.elasticsearch.plan.a.PlanAParser.DIV; +import static org.elasticsearch.plan.a.PlanAParser.DeclContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import static org.elasticsearch.plan.a.PlanAParser.DoContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.ExprContext; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import static 
org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import static org.elasticsearch.plan.a.PlanAParser.FalseContext; +import static org.elasticsearch.plan.a.PlanAParser.ForContext; +import static org.elasticsearch.plan.a.PlanAParser.IfContext; +import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import static org.elasticsearch.plan.a.PlanAParser.LSH; +import static org.elasticsearch.plan.a.PlanAParser.MUL; +import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import static org.elasticsearch.plan.a.PlanAParser.NullContext; +import static org.elasticsearch.plan.a.PlanAParser.NumericContext; +import static org.elasticsearch.plan.a.PlanAParser.PostincContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import static org.elasticsearch.plan.a.PlanAParser.PreincContext; +import static org.elasticsearch.plan.a.PlanAParser.REM; +import static org.elasticsearch.plan.a.PlanAParser.RSH; +import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import static org.elasticsearch.plan.a.PlanAParser.SUB; +import static org.elasticsearch.plan.a.PlanAParser.SingleContext; +import static org.elasticsearch.plan.a.PlanAParser.SourceContext; +import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.USH; +import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Writer extends PlanAParserBaseVisitor { private static class Branch { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java index af7eb25a6c0..d6e05f973a2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.plan.a; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; -import java.util.HashMap; -import java.util.Map; - /** Tests for addition operator across all types */ //TODO: NaN/Inf/overflow/... 
public class AdditionTests extends ScriptTestCase { @@ -40,7 +35,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); } - + public void testIntConst() throws Exception { assertEquals(1+1, exec("return 1+1;")); assertEquals(1+2, exec("return 1+2;")); @@ -52,7 +47,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(1+0, exec("return 1+0;")); assertEquals(0+0, exec("return 0+0;")); } - + public void testByte() throws Exception { assertEquals((byte)1+(byte)1, exec("byte x = 1; byte y = 1; return x+y;")); assertEquals((byte)1+(byte)2, exec("byte x = 1; byte y = 2; return x+y;")); @@ -64,7 +59,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((byte)1+(byte)0, exec("byte x = 1; byte y = 0; return x+y;")); assertEquals((byte)0+(byte)0, exec("byte x = 0; byte y = 0; return x+y;")); } - + public void testByteConst() throws Exception { assertEquals((byte)1+(byte)1, exec("return (byte)1+(byte)1;")); assertEquals((byte)1+(byte)2, exec("return (byte)1+(byte)2;")); @@ -76,7 +71,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((byte)1+(byte)0, exec("return (byte)1+(byte)0;")); assertEquals((byte)0+(byte)0, exec("return (byte)0+(byte)0;")); } - + public void testChar() throws Exception { assertEquals((char)1+(char)1, exec("char x = 1; char y = 1; return x+y;")); assertEquals((char)1+(char)2, exec("char x = 1; char y = 2; return x+y;")); @@ -88,7 +83,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((char)1+(char)0, exec("char x = 1; char y = 0; return x+y;")); assertEquals((char)0+(char)0, exec("char x = 0; char y = 0; return x+y;")); } - + public void testCharConst() throws Exception { assertEquals((char)1+(char)1, exec("return (char)1+(char)1;")); assertEquals((char)1+(char)2, exec("return (char)1+(char)2;")); @@ -100,7 +95,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((char)1+(char)0, exec("return (char)1+(char)0;")); assertEquals((char)0+(char)0, exec("return (char)0+(char)0;")); } - + public void testShort() throws Exception { assertEquals((short)1+(short)1, exec("short x = 1; short y = 1; return x+y;")); assertEquals((short)1+(short)2, exec("short x = 1; short y = 2; return x+y;")); @@ -112,7 +107,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((short)1+(short)0, exec("short x = 1; short y = 0; return x+y;")); assertEquals((short)0+(short)0, exec("short x = 0; short y = 0; return x+y;")); } - + public void testShortConst() throws Exception { assertEquals((short)1+(short)1, exec("return (short)1+(short)1;")); assertEquals((short)1+(short)2, exec("return (short)1+(short)2;")); @@ -124,7 +119,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((short)1+(short)0, exec("return (short)1+(short)0;")); assertEquals((short)0+(short)0, exec("return (short)0+(short)0;")); } - + public void testLong() throws Exception { assertEquals(1L+1L, exec("long x = 1; long y = 1; return x+y;")); assertEquals(1L+2L, exec("long x = 1; long y = 2; return x+y;")); @@ -136,7 +131,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(1L+0L, exec("long x = 1; long y = 0; return x+y;")); assertEquals(0L+0L, exec("long x = 0; long y = 0; return x+y;")); } - + public void testLongConst() throws Exception { assertEquals(1L+1L, exec("return 1L+1L;")); assertEquals(1L+2L, exec("return 1L+2L;")); @@ -184,7 +179,7 @@ public class AdditionTests extends 
ScriptTestCase { assertEquals(1.0+0.0, exec("double x = 1.0; double y = 0.0; return x+y;")); assertEquals(0.0+0.0, exec("double x = 0.0; double y = 0.0; return x+y;")); } - + public void testDoubleConst() throws Exception { assertEquals(1.0+1.0, exec("return 1.0+1.0;")); assertEquals(1.0+2.0, exec("return 1.0+2.0;")); diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index c4f109cc782..3722709e420 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -19,18 +19,6 @@ package org.elasticsearch.script.python; -import java.io.IOException; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.SpecialPermission; @@ -53,6 +41,14 @@ import org.python.core.PyObject; import org.python.core.PyStringMap; import org.python.util.PythonInterpreter; +import java.io.IOException; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.Permissions; +import java.security.PrivilegedAction; +import java.security.ProtectionDomain; +import java.util.Map; + /** * */ diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index ffae8205e33..d43b5df7e4a 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -30,12 +30,22 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; -import static org.elasticsearch.index.mapper.MapperBuilders.*; +import static org.elasticsearch.index.mapper.MapperBuilders.dateField; +import static org.elasticsearch.index.mapper.MapperBuilders.integerField; +import static org.elasticsearch.index.mapper.MapperBuilders.stringField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; /** diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java index 38e292725a5..fa9a2d06f8e 
100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java @@ -19,6 +19,16 @@ package org.elasticsearch.mapper.attachments; * under the License. */ +import org.apache.tika.Tika; +import org.apache.tika.exception.TikaException; +import org.apache.tika.metadata.Metadata; +import org.apache.tika.parser.AutoDetectParser; +import org.apache.tika.parser.Parser; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.bootstrap.JarHell; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; + import java.io.ByteArrayInputStream; import java.io.FilePermission; import java.io.IOException; @@ -37,16 +47,6 @@ import java.security.ProtectionDomain; import java.security.SecurityPermission; import java.util.PropertyPermission; -import org.apache.tika.Tika; -import org.apache.tika.exception.TikaException; -import org.apache.tika.metadata.Metadata; -import org.apache.tika.parser.AutoDetectParser; -import org.apache.tika.parser.Parser; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.bootstrap.JarHell; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.PathUtils; - /** * Runs tika with limited parsers and limited permissions. *
      @@ -69,13 +69,13 @@ final class TikaImpl { new org.apache.tika.parser.xml.DcXMLParser(), new org.apache.tika.parser.epub.EpubParser(), }; - + /** autodetector based on this subset */ private static final AutoDetectParser PARSER_INSTANCE = new AutoDetectParser(PARSERS); - + /** singleton tika instance */ private static final Tika TIKA_INSTANCE = new Tika(PARSER_INSTANCE.getDetector(), PARSER_INSTANCE); - + /** * parses with tika, throwing any exception hit while parsing the document */ diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java index 10e82e24c84..eda6f7669d7 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java @@ -32,7 +32,11 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; /** * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/18 diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java index bdbafea710a..1eecda65a05 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.mapper.attachments.MapperAttachmentsPlugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index acf0163acd9..42d13fce58a 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -32,7 +32,12 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; /** * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/38 diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index 40593ddb1bb..266c7cdd335 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -36,7 +36,10 @@ import org.junit.Before; import java.nio.charset.StandardCharsets; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; /** * diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 01e87dc1430..c855b45e846 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -30,12 +30,13 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; -import org.junit.Test; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; /** * diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java index a5e3ec9c17c..fbbdeb83a7d 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.mapper.attachments; * under the License. 
*/ +import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; +import org.apache.lucene.util.TestUtil; +import org.apache.tika.metadata.Metadata; +import org.elasticsearch.test.ESTestCase; + import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; -import org.apache.lucene.util.TestUtil; -import org.apache.tika.metadata.Metadata; - -import org.elasticsearch.test.ESTestCase; - -/** +/** * Evil test-coverage cheat, we parse a bunch of docs from tika * so that we have a nice grab-bag variety, and assert some content * comes back and no exception. @@ -43,7 +42,7 @@ public class TikaDocTests extends ESTestCase { public void testFiles() throws Exception { Path tmp = createTempDir(); TestUtil.unzip(getClass().getResourceAsStream(TIKA_FILES), tmp); - + try (DirectoryStream stream = Files.newDirectoryStream(tmp)) { for (Path doc : stream) { logger.debug("parsing: {}", doc); @@ -51,7 +50,7 @@ public class TikaDocTests extends ESTestCase { } } } - + void assertParseable(Path fileName) throws Exception { try { byte bytes[] = Files.readAllBytes(fileName); @@ -60,7 +59,7 @@ public class TikaDocTests extends ESTestCase { assertFalse(parsedContent.isEmpty()); logger.debug("extracted content: {}", parsedContent); } catch (Throwable e) { - throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); + throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); } } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java index 5341e038cff..8743ed75934 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java @@ -33,7 +33,14 @@ import java.io.IOException; import java.io.InputStream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.*; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.AUTHOR; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.CONTENT_LENGTH; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.CONTENT_TYPE; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.DATE; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.KEYWORDS; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.LANGUAGE; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.NAME; +import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.TITLE; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.isEmptyOrNullString; diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java index 5dfc48570e6..97c5ad994a4 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java +++ 
b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.murmur3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java index c267160eeb7..9899776f7dd 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index 73f7a73547c..5bb251dca14 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -21,16 +21,13 @@ package org.elasticsearch.index.mapper.size; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.junit.Test; import java.io.IOException; import java.util.Collection; -import java.util.Collections; import java.util.Locale; import java.util.Map; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index e07b76bfc92..9661de381c5 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,13 +19,6 @@ package org.elasticsearch.index.mapper.size; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -import java.util.Collections; - import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; @@ -40,8 +33,15 @@ import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static 
org.hamcrest.Matchers.nullValue; + public class SizeMappingTests extends ESSingleNodeTestCase { - + MapperRegistry mapperRegistry; IndexService indexService; DocumentMapperParser parser; @@ -72,7 +72,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - + public void testSizeEnabledAndStoredBackcompat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).field("store", "yes").endObject() @@ -97,7 +97,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - + public void testSizeDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() @@ -113,7 +113,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size"), nullValue()); } - + public void testSizeNotSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); @@ -128,7 +128,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size"), nullValue()); } - + public void testThatDisablingWorksWhenMerging() throws Exception { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() @@ -143,4 +143,4 @@ public class SizeMappingTests extends ESSingleNodeTestCase { enabledMapper.merge(disabledMapper.mapping(), false, false); assertThat(enabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } -} \ No newline at end of file +} diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 5a551f54de3..8c20bb6d873 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure.storage; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import java.io.InputStream; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 56e75d7386c..3159b038998 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -22,7 +22,11 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.CloudStorageAccount; import com.microsoft.azure.storage.LocationMode; import 
com.microsoft.azure.storage.StorageException; -import com.microsoft.azure.storage.blob.*; +import com.microsoft.azure.storage.blob.BlobProperties; +import com.microsoft.azure.storage.blob.CloudBlobClient; +import com.microsoft.azure.storage.blob.CloudBlobContainer; +import com.microsoft.azure.storage.blob.CloudBlockBlob; +import com.microsoft.azure.storage.blob.ListBlobItem; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; @@ -47,7 +51,7 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent secondariesStorageSettings; final Map clients; - + @Inject public AzureStorageServiceImpl(Settings settings) { super(settings); @@ -81,7 +85,7 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) throws URISyntaxException, StorageException { // NOTE: this should be here: if (prefix == null) prefix = ""; - // however, this is really inefficient since deleteBlobsByPrefix enumerates everything and + // however, this is really inefficient since deleteBlobsByPrefix enumerates everything and // then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix! - + logger.debug("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix); MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 829ccb7e95e..60930a3a946 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -19,8 +19,8 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.cluster.metadata.MetaData; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java index 8a17f83d92d..05da2ccfceb 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.storage.AzureStorageService; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.cloud.azure.storage.AzureStorageServiceMock; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 
90e44d97a3f..5a1c76df413 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure.storage; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java index 17c0b01850f..dfb7d4517d6 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java @@ -27,7 +27,10 @@ import org.elasticsearch.common.settings.Settings; import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class AzureSettingsParserTest extends LuceneTestCase { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index 7e4285829a8..cec7361de0a 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -21,9 +21,8 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; - +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 9b65f7bec2f..242dc2f3269 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -18,6 +18,16 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesModule; +import org.elasticsearch.repositories.Repository; + import 
java.io.IOException; import java.lang.reflect.Method; import java.net.URI; @@ -31,23 +41,13 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.Repository; - // // Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode. // This poses two problems: // - Hadoop itself comes with tons of jars, many providing the same classes across packages. In particular Hadoop 2 provides package annotations in the same // package across jars which trips JarHell. Thus, to allow Hadoop jars to load, the plugin uses a dedicated CL which picks them up from the hadoop-libs folder. // - The issue though with using a different CL is that it picks up the jars from a different location / codeBase and thus it does not fall under the plugin -// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. +// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. // There are different approaches here: // - implement a custom classloader that loads the jars but 'lies' about the codesource. It is doable but since URLClassLoader is locked down, one would // would have to implement the whole jar opening and loading from it. Not impossible but still fairly low-level. @@ -64,7 +64,7 @@ import org.elasticsearch.repositories.Repository; // - package plugin.hadoop.hdfs is part of the plugin // - all the other packages are assumed to be in the nested Hadoop CL. -// Code +// Code public class HdfsPlugin extends Plugin { @Override @@ -81,7 +81,7 @@ public class HdfsPlugin extends Plugin { public void onModule(RepositoriesModule repositoriesModule) { String baseLib = Utils.detectLibFolder(); List cp = getHadoopClassLoaderPath(baseLib); - + ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader()); Class repository = null; @@ -170,4 +170,4 @@ public class HdfsPlugin extends Plugin { } } } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java index cf786179787..89fa3f5910f 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java @@ -19,6 +19,8 @@ package org.elasticsearch.plugin.hadoop.hdfs; * under the License. 
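To make the classloader trick described in the comment block above concrete, here is a minimal sketch of the pattern that getHadoopClassLoaderPath plus URLClassLoader.newInstance implement in the hunk above. The folder layout, class and method names below are illustrative assumptions for this sketch, not code taken from the patch:

    import java.net.URL;
    import java.net.URLClassLoader;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.ArrayList;
    import java.util.List;

    class HadoopClassLoaderSketch {
        /**
         * Builds a dedicated classloader over the jars in a hadoop-libs folder so
         * that JarHell never scans them; the trade-off, as the comment above notes,
         * is that classes loaded this way carry the hadoop-libs codeBase rather
         * than the plugin's, so the plugin's policy grants do not apply to them.
         */
        static ClassLoader newHadoopClassLoader(Path hadoopLibs, ClassLoader parent) throws Exception {
            List<URL> cp = new ArrayList<>();
            try (DirectoryStream<Path> jars = Files.newDirectoryStream(hadoopLibs, "*.jar")) {
                for (Path jar : jars) {
                    cp.add(jar.toUri().toURL());
                }
            }
            return URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), parent);
        }
    }

The repository class is then resolved reflectively against that loader, e.g. Class.forName("org.elasticsearch.repositories.hdfs.HdfsRepository", true, hadoopCL), which is the reflective dance the plugin's onModule performs above.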
*/ +import org.elasticsearch.SpecialPermission; + import java.net.URL; import java.security.AccessControlContext; import java.security.AccessController; @@ -26,8 +28,6 @@ import java.security.DomainCombiner; import java.security.PrivilegedAction; import java.security.ProtectionDomain; -import org.elasticsearch.SpecialPermission; - public abstract class Utils { protected static AccessControlContext hadoopACC() { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java index 5e7c4d3fa57..b0b5fb10c33 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.repositories.hdfs; -import java.io.IOException; - import org.apache.hadoop.fs.FileSystem; +import java.io.IOException; + interface FileSystemFactory { FileSystem getFileSystem() throws IOException; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java index 3eda2272149..7b9ec8331d9 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java @@ -19,10 +19,10 @@ package org.elasticsearch.repositories.hdfs; -import java.io.IOException; - import org.apache.hadoop.fs.FileSystem; +import java.io.IOException; + interface FsCallback { V doInHdfs(FileSystem fs) throws IOException; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 11081445fd4..b5b5b4d0f4a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -18,18 +18,6 @@ */ package org.elasticsearch.repositories.hdfs; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.nio.file.Files; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.Locale; -import java.util.Map; -import java.util.Map.Entry; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,6 +39,18 @@ import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.nio.file.Files; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; + public class HdfsRepository extends BlobStoreRepository implements FileSystemFactory { public final static String TYPE = "hdfs"; @@ -256,4 +256,4 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac 
IOUtils.closeStream(fs); fs = null; } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index 6a0d4ffa818..550224082d9 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -19,17 +19,17 @@ package org.elasticsearch.repositories.hdfs; +import org.apache.hadoop.fs.FileSystem; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.plugin.hadoop.hdfs.Utils; + import java.io.IOException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; -import org.apache.hadoop.fs.FileSystem; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.plugin.hadoop.hdfs.Utils; - class SecurityUtils { abstract static class AccBridge extends Utils { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java index 8d8d6755cc3..065c06208ef 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java @@ -19,17 +19,15 @@ package org.elasticsearch.plugin.hadoop.hdfs; * under the License. 
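The SecurityUtils class above pairs with Utils.hadoopACC(): filesystem work is funneled through AccessController.doPrivileged under a dedicated AccessControlContext, so Hadoop code runs with only the permissions intended for it. A minimal sketch of that wrapper follows, with names assumed from the imports shown rather than copied from the patch:

    import java.io.IOException;
    import java.security.AccessControlContext;
    import java.security.AccessController;
    import java.security.PrivilegedActionException;
    import java.security.PrivilegedExceptionAction;

    class PrivilegedHdfsCallSketch {
        /** runs the action under the restricted Hadoop AccessControlContext */
        static <V> V execute(PrivilegedExceptionAction<V> action, AccessControlContext hadoopContext) throws IOException {
            try {
                return AccessController.doPrivileged(action, hadoopContext);
            } catch (PrivilegedActionException e) {
                // unwrap the checked exception thrown inside the privileged block
                throw new IOException("privileged Hadoop filesystem call failed", e.getException());
            }
        }
    }

A caller would pass the actual filesystem work as the action, mirroring the FsCallback interface (V doInHdfs(FileSystem fs)) whose imports are reshuffled earlier in this patch.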
*/ -import java.io.IOException; -import java.util.Collection; - -import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import java.io.IOException; +import java.util.Collection; public class HdfsRepositoryRestIT extends ESRestTestCase { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java index 8730a46a084..e980b6a26e3 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java @@ -23,8 +23,6 @@ import java.net.URL; import java.util.Collections; import java.util.List; -import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; - public class HdfsTestPlugin extends HdfsPlugin { @Override diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index d1b23e92538..b4b530e916b 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; - -import java.util.Collection; - import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; @@ -42,6 +37,11 @@ import org.elasticsearch.test.store.MockFSDirectoryService; import org.junit.After; import org.junit.Before; +import java.util.Collection; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + /** * You must specify {@code -Dtests.thirdparty=true} */ @@ -215,4 +215,4 @@ public class HdfsTests extends ESIntegTestCase { private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} \ No newline at end of file +} diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 7d0b72cd63c..51594c01302 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -21,7 +21,12 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.*; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSCredentialsProviderChain; +import com.amazonaws.auth.BasicAWSCredentials; +import 
com.amazonaws.auth.EnvironmentVariableCredentialsProvider; +import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.SystemPropertiesCredentialsProvider; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java index 8063ba7de33..dd278a9231d 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java @@ -20,7 +20,16 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PartETag; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; import com.amazonaws.util.Base64; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java index 4f5c46a8c59..4861ccc202b 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java @@ -20,7 +20,12 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CopyObjectRequest; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectSummary; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java index 198e1862da2..a8bf3ea2959 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java @@ -21,8 +21,14 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import 
com.amazonaws.services.s3.model.CreateBucketRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.amazonaws.services.s3.model.StorageClass; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java index a38a8ed3c51..a7305727353 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java @@ -30,7 +30,6 @@ import org.elasticsearch.repositories.s3.S3Repository; import java.security.AccessController; import java.security.PrivilegedAction; -import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java index cd3584d7bec..6346ffe57d4 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; - import org.elasticsearch.test.ESTestCase; import static org.hamcrest.CoreMatchers.is; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java index 0c9e7535db0..97829f9d689 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java @@ -27,7 +27,86 @@ import com.amazonaws.regions.Region; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.S3ClientOptions; import com.amazonaws.services.s3.S3ResponseMetadata; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; +import com.amazonaws.services.s3.model.AccessControlList; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.BucketCrossOriginConfiguration; +import com.amazonaws.services.s3.model.BucketLifecycleConfiguration; +import com.amazonaws.services.s3.model.BucketLoggingConfiguration; +import com.amazonaws.services.s3.model.BucketNotificationConfiguration; +import com.amazonaws.services.s3.model.BucketPolicy; +import com.amazonaws.services.s3.model.BucketReplicationConfiguration; +import com.amazonaws.services.s3.model.BucketTaggingConfiguration; +import com.amazonaws.services.s3.model.BucketVersioningConfiguration; +import com.amazonaws.services.s3.model.BucketWebsiteConfiguration; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.CompleteMultipartUploadResult; +import com.amazonaws.services.s3.model.CopyObjectRequest; +import 
com.amazonaws.services.s3.model.CopyObjectResult; +import com.amazonaws.services.s3.model.CopyPartRequest; +import com.amazonaws.services.s3.model.CopyPartResult; +import com.amazonaws.services.s3.model.CreateBucketRequest; +import com.amazonaws.services.s3.model.DeleteBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketPolicyRequest; +import com.amazonaws.services.s3.model.DeleteBucketRequest; +import com.amazonaws.services.s3.model.DeleteBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteObjectRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; +import com.amazonaws.services.s3.model.DeleteObjectsResult; +import com.amazonaws.services.s3.model.DeleteVersionRequest; +import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest; +import com.amazonaws.services.s3.model.GetBucketAclRequest; +import com.amazonaws.services.s3.model.GetBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketLocationRequest; +import com.amazonaws.services.s3.model.GetBucketLoggingConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketNotificationConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketPolicyRequest; +import com.amazonaws.services.s3.model.GetBucketReplicationConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketVersioningConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.GetObjectMetadataRequest; +import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.GetS3AccountOwnerRequest; +import com.amazonaws.services.s3.model.HeadBucketRequest; +import com.amazonaws.services.s3.model.HeadBucketResult; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadResult; +import com.amazonaws.services.s3.model.ListBucketsRequest; +import com.amazonaws.services.s3.model.ListMultipartUploadsRequest; +import com.amazonaws.services.s3.model.ListNextBatchOfObjectsRequest; +import com.amazonaws.services.s3.model.ListNextBatchOfVersionsRequest; +import com.amazonaws.services.s3.model.ListObjectsRequest; +import com.amazonaws.services.s3.model.ListPartsRequest; +import com.amazonaws.services.s3.model.ListVersionsRequest; +import com.amazonaws.services.s3.model.MultipartUploadListing; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.Owner; +import com.amazonaws.services.s3.model.PartListing; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.RestoreObjectRequest; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.SetBucketAclRequest; +import com.amazonaws.services.s3.model.SetBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketLoggingConfigurationRequest; +import 
com.amazonaws.services.s3.model.SetBucketNotificationConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketPolicyRequest; +import com.amazonaws.services.s3.model.SetBucketReplicationConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketVersioningConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.SetObjectAclRequest; +import com.amazonaws.services.s3.model.StorageClass; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; +import com.amazonaws.services.s3.model.VersionListing; import org.elasticsearch.common.SuppressForbidden; import java.io.File; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java index d2ed3ba952f..31682ee4de6 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java @@ -22,7 +22,12 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 9ffa1286bc6..55f88fbfeea 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -23,7 +23,6 @@ import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; - import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java index 744be5e49de..9d9bdc1d389 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java @@ -23,7 +23,6 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockFactory; import org.apache.lucene.store.MMapDirectory; import 
org.apache.lucene.store.SmbDirectoryWrapper; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.FsDirectoryService; diff --git a/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java b/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java index 4c6c230c5dc..23590b8f52f 100644 --- a/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java +++ b/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java @@ -19,14 +19,13 @@ package org.apache.lucene.store; * under the License. */ +import com.carrotsearch.randomizedtesting.annotations.Listeners; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; -import com.carrotsearch.randomizedtesting.annotations.Listeners; -import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - /** * Extends Lucene's BaseDirectoryTestCase with ES test behavior. */ diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 9faa604a18e..78085b201a3 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -34,8 +34,13 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.USAGE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; @SuppressForbidden(reason = "modifies system properties intentionally") public class BootstrapCliParserTests extends CliToolTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java index 95d0789fbf8..8633511756d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; - import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.Environment; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java index 52486ba7d62..3789c273cf8 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java +++ 
b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java @@ -30,7 +30,9 @@ import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; @SuppressForbidden(reason = "modifies system properties intentionally") public class EvilInternalSettingsPreparerTests extends ESTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java index d5a07606e65..0eebc9731ff 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java @@ -35,16 +35,28 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.*; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.HashSet; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import static java.nio.file.attribute.PosixFilePermission.*; +import static java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE; +import static java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE; +import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDirectoryExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; // there are some lucene file systems that seem to cause problems (deleted files, dirs instead of files) @LuceneTestCase.SuppressFileSystems("*") diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index b2b2c0cff5c..cee98bc0163 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -36,9 +36,19 @@ import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.test.rest.client.http.HttpResponse; import org.jboss.netty.bootstrap.ServerBootstrap; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import 
org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; -import org.jboss.netty.handler.codec.http.*; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpRequest; +import org.jboss.netty.handler.codec.http.HttpRequestDecoder; +import org.jboss.netty.handler.codec.http.HttpResponseEncoder; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.ssl.SslContext; import org.jboss.netty.handler.ssl.SslHandler; import org.jboss.netty.handler.ssl.util.InsecureTrustManagerFactory; @@ -75,8 +85,14 @@ import static org.elasticsearch.common.cli.CliToolTestCase.args; import static org.elasticsearch.common.io.FileTestUtils.assertFileContent; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDirectoryExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0.0) diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 95df2d04458..cddea9fd774 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -20,7 +20,6 @@ package org.elasticsearch.smoketest; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 9d3d424f62e..d0be6f13946 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -17,10 +17,10 @@ * under the License. 
*/ -import org.elasticsearch.gradle.vagrant.VagrantCommandTask -import org.elasticsearch.gradle.vagrant.BatsOverVagrantTask + import org.elasticsearch.gradle.FileContentsTask -import org.gradle.api.InvalidUserDataException +import org.elasticsearch.gradle.vagrant.BatsOverVagrantTask +import org.elasticsearch.gradle.vagrant.VagrantCommandTask String testScripts = '*.bats' String testCommand = "cd \$TESTROOT && sudo bats --tap \$BATS/$testScripts" diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 68784083797..b2ce5ebd86e 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,13 +20,8 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; - import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestSecurityManager; import org.elasticsearch.SecureSM; -import org.elasticsearch.bootstrap.Bootstrap; -import org.elasticsearch.bootstrap.ESPolicy; -import org.elasticsearch.bootstrap.Security; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; @@ -55,16 +50,16 @@ import java.util.Set; import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; -/** +/** * Initializes natives and installs test security manager * (init'd early by base classes to ensure it happens regardless of which - * test case happens to be first, test ordering, etc). + * test case happens to be first, test ordering, etc). *
      * The idea is to mimic as much as possible what happens with ES in production * mode (e.g. assign permissions and install security manager the same way) */ public class BootstrapForTesting { - + // TODO: can we share more code with the non-test side here // without making things complex??? @@ -83,10 +78,10 @@ public class BootstrapForTesting { // initialize probes Bootstrap.initializeProbes(); - + // initialize sysprops BootstrapInfo.getSystemProperties(); - + // check for jar hell try { JarHell.checkJarHell(); @@ -130,7 +125,7 @@ public class BootstrapForTesting { if (System.getProperty("tests.maven") == null) { perms.add(new RuntimePermission("setIO")); } - + // add bind permissions for testing // ephemeral ports (note, on java 7 before update 51, this is a different permission) // this should really be the only one allowed for tests, otherwise they have race conditions @@ -138,7 +133,7 @@ public class BootstrapForTesting { // ... but tests are messy. like file permissions, just let them live in a fantasy for now. // TODO: cut over all tests to bind to ephemeral ports perms.add(new SocketPermission("localhost:1024-", "listen,resolve")); - + // read test-framework permissions final Policy testFramework = Security.readPolicy(Bootstrap.class.getResource("test-framework.policy"), JarHell.parseClassPath()); final Policy esPolicy = new ESPolicy(perms, getPluginPermissions(), true); @@ -172,7 +167,7 @@ public class BootstrapForTesting { } } - /** + /** * we dont know which codesources belong to which plugin, so just remove the permission from key codebases * like core, test-framework, etc. this way tests fail if accesscontroller blocks are missing. */ @@ -182,7 +177,7 @@ public class BootstrapForTesting { if (pluginPolicies.isEmpty()) { return Collections.emptyMap(); } - + // compute classpath minus obvious places, all other jars will get the permission. 
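// (editorial sketch of the step below, not part of the original patch: start
//  from every classpath entry as a candidate codebase, drop the well-known
//  non-plugin locations such as core, the test framework and the test runner
//  jars, then grant the plugin policy's permissions to every jar that remains,
//  recording them per-codebase in a map consulted by the test policy)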
Set codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks())); Set excluded = new HashSet<>(Arrays.asList( @@ -198,13 +193,13 @@ public class BootstrapForTesting { Assert.class.getProtectionDomain().getCodeSource().getLocation() )); codebases.removeAll(excluded); - + // parse each policy file, with codebase substitution from the classpath final List policies = new ArrayList<>(); for (URL policyFile : pluginPolicies) { policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()]))); } - + // consult each policy file for those codebases Map map = new HashMap<>(); for (URL url : codebases) { diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index aec90aa50d4..0096fcf9b5f 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -22,7 +22,6 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.lookup.SearchLookup; diff --git a/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java index caa414e071c..2148d0a71c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java @@ -30,7 +30,12 @@ import org.elasticsearch.common.transport.TransportAddress; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.Random; import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index bf9ccc957bc..a630f24214d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -20,7 +20,11 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.store.*; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index c02682bb641..5dc824f687d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -37,8 +37,8 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import 
org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -49,7 +49,11 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import java.lang.reflect.Constructor; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Random; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 03089708b66..5d08f78a90e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -40,7 +40,6 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; @@ -109,7 +108,6 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeMocksPlugin; @@ -134,7 +132,6 @@ import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; -import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e1443110c0d..3777653297e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,14 +29,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; - import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import 
org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.TestRuleMarkFailure; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TimeUnits; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; @@ -50,7 +48,6 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -69,7 +66,11 @@ import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java index ed54ae60fbd..1af9fa5ba7e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java @@ -21,7 +21,6 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; @@ -46,7 +45,7 @@ public abstract class ESTokenStreamTestCase extends BaseTokenStreamTestCase { static { BootstrapForTesting.ensureInitialized(); } - + public static Version randomVersion() { return VersionUtils.randomVersion(random()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 10d4482a24c..5ab862e3e4d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -65,7 +65,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; @@ -78,15 +77,12 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; -import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.MockSearchService; import 
org.elasticsearch.search.SearchService; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.Transport; @@ -98,7 +94,19 @@ import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Random; +import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; @@ -110,11 +118,14 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static junit.framework.Assert.fail; -import static org.apache.lucene.util.LuceneTestCase.*; +import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; +import static org.apache.lucene.util.LuceneTestCase.rarely; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java index 3bace95c238..10469286e1a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; -import java.nio.charset.StandardCharsets; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -27,6 +26,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; public class StreamsUtils { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index 858fbab9ab5..a05309a8a51 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -37,7 +37,7 @@ import java.net.InetSocketAddress; import java.util.Random; import java.util.Set; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; /** * Base test cluster that exposes the basis to run tests against any elasticsearch cluster, whose layout diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 51fb0c905f4..7fec7e8e4ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; - import org.apache.lucene.search.Collector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -59,7 +58,6 @@ import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.profile.Profiler; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java index cb3d643f555..06def24d6f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java @@ -19,7 +19,16 @@ package org.elasticsearch.test.cluster; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index 5dc8cce99c6..6e17eae1be4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -20,7 +20,17 @@ package org.elasticsearch.test.cluster; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -39,7 +49,10 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; import 
org.elasticsearch.threadpool.ThreadPool; -import java.util.*; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Queue; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 484f65ea650..e549c185616 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -24,15 +24,12 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; -import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.Set; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java index 0187d4ac035..a0f027bcbd8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java @@ -19,8 +19,6 @@ package org.elasticsearch.test.engine; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.index.engine.Engine; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java index de51670f57d..87a12791bc1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java @@ -19,19 +19,10 @@ package org.elasticsearch.test.engine; import org.apache.lucene.index.FilterDirectoryReader; -import org.elasticsearch.common.inject.BindingAnnotation; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineFactory; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.PARAMETER; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - public final class MockEngineFactory implements EngineFactory { private final Class wrapper; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index 70dfa6847b4..37ccbf47538 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -21,7 +21,11 @@ package org.elasticsearch.test.engine; import 
org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.*; +import org.apache.lucene.search.AssertingIndexSearcher; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryCache; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.SearcherManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.ESLogger; @@ -49,7 +53,7 @@ public final class MockEngineSupport { public static final String WRAP_READER_RATIO = "index.engine.mock.random.wrap_reader_ratio"; public static final String READER_WRAPPER_TYPE = "index.engine.mock.random.wrapper"; public static final String FLUSH_ON_CLOSE_RATIO = "index.engine.mock.flush_on_close.ratio"; - + private final AtomicBoolean closing = new AtomicBoolean(false); private final ESLogger logger = Loggers.getLogger(Engine.class); private final ShardId shardId; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java index f05f69bf275..2116dcc390c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java @@ -19,18 +19,13 @@ package org.elasticsearch.test.engine; -import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; -import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SearcherManager; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.ShadowEngine; -import java.io.IOException; -import java.util.Map; - final class MockShadowEngine extends ShadowEngine { private final MockEngineSupport support; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index 422b9375a1e..73281b3f6ea 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -19,8 +19,16 @@ package org.elasticsearch.test.engine; -import org.apache.lucene.index.*; -import org.apache.lucene.util.Bits; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.CompiledAutomaton; diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java index 1853d291c6d..d86791fa6bd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java @@ -22,7 +22,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.rest.client.http.HttpResponse; import org.hamcrest.Description; -import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; public class ElasticsearchMatchers { diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java index d2615eabcac..b999f24822b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java @@ -20,7 +20,9 @@ package org.elasticsearch.test.junit.annotations; import com.carrotsearch.randomizedtesting.annotations.TestGroup; -import java.lang.annotation.*; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; /** * Annotation used to set if internet network connectivity is required to run the test. diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 47a77dfc9d2..969d59d885e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -18,10 +18,7 @@ */ package org.elasticsearch.test.junit.listeners; -import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.ReproduceErrorMessageBuilder; -import com.carrotsearch.randomizedtesting.TraceFormatting; - import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -145,7 +142,7 @@ public class ReproduceInfoPrinter extends RunListener { // these properties only make sense for integration tests appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } - appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", + appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", "tests.client.ratio", "tests.heap.size", "tests.bwc", "tests.bwc.version"); if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) { appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\""); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 266f8e8038c..b4aecd52a14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressFsync; diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java index f6e3ddabd5e..ed1ce728c0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java @@ -19,7 +19,11 @@ package org.elasticsearch.test.rest.client; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; public class RestPath { private final List parts; @@ -94,4 +98,4 @@ public class RestPath { this.pathPart = pathPart; } } -} \ No newline at end of file +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java index f6ce416dbff..e4c8849a92f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java @@ -18,7 +18,13 @@ */ package org.elasticsearch.test.rest.client.http; -import org.apache.http.client.methods.*; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpOptions; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.elasticsearch.client.support.Headers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java index 9f3a8b6df9d..3d5e21e5146 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java @@ -21,7 +21,9 @@ package org.elasticsearch.test.rest.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.equalToIgnoringCase; import static org.junit.Assert.assertThat; /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java index aacb5f0a3bf..1a899c3cc2b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java @@ -21,7 +21,10 @@ package org.elasticsearch.test.rest.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.equalToIgnoringCase; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; /** diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java index 16efcae96c5..e00fbbea01c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java @@ -24,7 +24,9 @@ import org.elasticsearch.common.logging.Loggers; import java.util.regex.Pattern; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 27a2e6fb22e..58a72789f65 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -21,10 +21,13 @@ package org.elasticsearch.test.store; import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import java.nio.charset.StandardCharsets; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.store.*; +import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.LockFactory; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestRuleMarkFailure; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -35,11 +38,11 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; import org.junit.Assert; @@ -47,8 +50,11 @@ import org.junit.Assert; import java.io.Closeable; import java.io.IOException; import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.Random; public class MockFSDirectoryService extends FsDirectoryService { diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 86cf0ddb563..3fe700701dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -25,7 +25,11 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import 
org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreConfig; diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index 2363d98a113..229dc944b90 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -18,17 +18,27 @@ */ package org.elasticsearch.test.transport; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportServiceAdapter; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.BlockingQueue; /** A transport class that doesn't send anything but rather captures all requests for inspection from tests */ diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java index cbe14f8e9cc..2a925dd2586 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.test.rest.test; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; import org.junit.After; -import org.junit.Ignore; import static org.hamcrest.Matchers.nullValue; diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index af468fa084c..c1cfa56c8be 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -27,9 +27,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; -import java.io.IOException; import java.nio.file.Path; -import 
java.util.*; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Random; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java index 55d86aa4c9a..b9643dda7c7 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.test.test; import com.carrotsearch.randomizedtesting.annotations.Repeat; - import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.TestCluster; From a639b3d3e462c1a67dcc8390ce8821ffe6fc6547 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 12:47:35 -0800 Subject: [PATCH 145/322] Remove wildcard imports for groovy too --- .../gradle/junit4/TestProgressLogger.groovy | 6 ++++- .../gradle/junit4/TestReportLogger.groovy | 27 ++++++++++++++++--- .../org/elasticsearch/gradle/AntTask.groovy | 6 ++++- .../elasticsearch/gradle/BuildPlugin.groovy | 13 +++++++-- .../gradle/test/ClusterFormationTasks.groovy | 7 ++++- 5 files changed, 50 insertions(+), 9 deletions(-) diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy index 4d69d039a6f..14f5d476be3 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy @@ -29,7 +29,11 @@ import org.gradle.logging.ProgressLogger import org.gradle.logging.ProgressLoggerFactory import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds -import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.* +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.FAILURE +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED_ASSUMPTION +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.OK import static java.lang.Math.max /** diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index 15c24f9c793..450d3645182 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -5,8 +5,21 @@ import com.carrotsearch.ant.tasks.junit4.Pluralize import com.carrotsearch.ant.tasks.junit4.TestsSummaryEventListener import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.base.Strings import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.eventbus.Subscribe -import com.carrotsearch.ant.tasks.junit4.events.* -import com.carrotsearch.ant.tasks.junit4.events.aggregated.* +import com.carrotsearch.ant.tasks.junit4.events.EventType +import com.carrotsearch.ant.tasks.junit4.events.IEvent +import 
com.carrotsearch.ant.tasks.junit4.events.IStreamEvent +import com.carrotsearch.ant.tasks.junit4.events.SuiteStartedEvent +import com.carrotsearch.ant.tasks.junit4.events.TestFinishedEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedQuitEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteStartedEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.ChildBootstrap +import com.carrotsearch.ant.tasks.junit4.events.aggregated.HeartBeatEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.PartialOutputEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus import com.carrotsearch.ant.tasks.junit4.events.mirrors.FailureMirror import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener import com.carrotsearch.ant.tasks.junit4.listeners.StackTraceFilter @@ -15,11 +28,17 @@ import org.gradle.api.logging.LogLevel import org.gradle.api.logging.Logger import org.junit.runner.Description -import javax.sound.sampled.* +import javax.sound.sampled.AudioSystem +import javax.sound.sampled.Clip +import javax.sound.sampled.Line +import javax.sound.sampled.LineEvent +import javax.sound.sampled.LineListener import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger -import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.* +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDescription +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatTime import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode class TestReportLogger extends TestsSummaryEventListener implements AggregatedEventListener { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 3914442d7da..4d580c56cf4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -19,7 +19,11 @@ package org.elasticsearch.gradle -import org.apache.tools.ant.* +import org.apache.tools.ant.BuildException +import org.apache.tools.ant.BuildListener +import org.apache.tools.ant.BuildLogger +import org.apache.tools.ant.DefaultLogger +import org.apache.tools.ant.Project import org.gradle.api.DefaultTask import org.gradle.api.GradleException import org.gradle.api.tasks.TaskAction diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index c246213fd15..eef79bba737 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -20,8 +20,17 @@ package org.elasticsearch.gradle import nebula.plugin.extraconfigurations.ProvidedBasePlugin import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.* -import org.gradle.api.artifacts.* +import org.gradle.api.GradleException +import org.gradle.api.JavaVersion +import org.gradle.api.Plugin +import 
org.gradle.api.Project +import org.gradle.api.Task +import org.gradle.api.XmlProvider +import org.gradle.api.artifacts.Configuration +import org.gradle.api.artifacts.ModuleDependency +import org.gradle.api.artifacts.ModuleVersionIdentifier +import org.gradle.api.artifacts.ProjectDependency +import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.artifacts.maven.MavenPom import org.gradle.api.tasks.bundling.Jar diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index c7c62e0da0a..4b7c05ec2eb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -23,7 +23,12 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.gradle.api.* +import org.gradle.api.AntBuilder +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException +import org.gradle.api.Project +import org.gradle.api.Task import org.gradle.api.artifacts.Configuration import org.gradle.api.file.FileCollection import org.gradle.api.logging.Logger From d0a10b337ed414eaf8ef3c60452d1a2f92be6ae8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 13:16:37 -0800 Subject: [PATCH 146/322] Fix silly typo in variable name... --- .../src/main/groovy/org/elasticsearch/gradle/AntTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index b713c00ed8d..f86846ac6dd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -52,7 +52,7 @@ public class AntTask extends DefaultTask { // remove existing loggers, we add our own List toRemove = new ArrayList<>(); for (BuildListener listener : ant.project.getBuildListeners()) { - if (l instanceof BuildLogger) { + if (listener instanceof BuildLogger) { toRemove.add(listener); } } From 690fb2cd3f957c318a3c71c8a422cb7bfc928cdd Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 13:22:20 -0800 Subject: [PATCH 147/322] Rename InternalFilters.Bucket to InternalFilters.InternalBucket to avoid name collision --- .../bucket/filters/FiltersAggregator.java | 10 +-- .../bucket/filters/InternalFilters.java | 62 +++++++++---------- .../DerivativePipelineAggregator.java | 2 +- .../profile/InternalProfileCollector.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 2 +- .../aggregations/metrics/GeoBoundsIT.java | 2 +- .../aggregations/pipeline/DerivativeIT.java | 22 +++---- .../repositories/s3/S3Repository.java | 2 +- 8 files changed, 52 insertions(+), 52 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index 3cd67f835ec..eec7064d1bd 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java 
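// Background for this rename (a simplified sketch; the exact signatures are assumed, and the
// angle-bracketed type parameters below are missing from the surrounding diff text):
//
//     interface Filters extends MultiBucketsAggregation {
//         interface Bucket extends MultiBucketsAggregation.Bucket { }
//     }
//     class InternalFilters extends InternalMultiBucketAggregation<InternalFilters, InternalFilters.Bucket>
//             implements Filters {
//         static class Bucket implements Filters.Bucket { }  // nested "Bucket" shadows the inherited
//     }                                                      // Filters.Bucket member type
//
// Renaming the nested class to InternalBucket keeps unqualified references to Bucket unambiguous.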
@@ -112,16 +112,16 @@ public class FiltersAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - List buckets = new ArrayList<>(filters.length); + List buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { long bucketOrd = bucketOrd(owningBucketOrdinal, i); - InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed); buckets.add(bucket); } // other bucket if (showOtherBucket) { long bucketOrd = bucketOrd(owningBucketOrdinal, keys.length); - InternalFilters.Bucket bucket = new InternalFilters.Bucket(otherBucketKey, bucketDocCount(bucketOrd), + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed); buckets.add(bucket); } @@ -131,9 +131,9 @@ public class FiltersAggregator extends BucketsAggregator { @Override public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); - List buckets = new ArrayList<>(filters.length); + List buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { - InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], 0, subAggs, keyed); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], 0, subAggs, keyed); buckets.add(bucket); } return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java index f685482636f..aed398d51df 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java @@ -40,7 +40,7 @@ import java.util.Map; /** * */ -public class InternalFilters extends InternalMultiBucketAggregation implements Filters { +public class InternalFilters extends InternalMultiBucketAggregation implements Filters { public final static Type TYPE = new Type("filters"); @@ -53,16 +53,16 @@ public class InternalFilters extends InternalMultiBucketAggregation BUCKET_STREAM = new BucketStreams.Stream() { + private final static BucketStreams.Stream BUCKET_STREAM = new BucketStreams.Stream() { @Override - public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException { - Bucket filters = new Bucket(context.keyed()); + public InternalBucket readResult(StreamInput in, BucketStreamContext context) throws IOException { + InternalBucket filters = new InternalBucket(context.keyed()); filters.readFrom(in); return filters; } @Override - public BucketStreamContext getBucketStreamContext(Bucket bucket) { + public BucketStreamContext getBucketStreamContext(InternalBucket bucket) { BucketStreamContext context = new BucketStreamContext(); context.keyed(bucket.keyed); return context; @@ -74,19 +74,19 @@ public class InternalFilters extends InternalMultiBucketAggregation buckets, ReduceContext context) { - Bucket reduced = null; + InternalBucket reduce(List buckets, ReduceContext context) { + InternalBucket reduced = null; List aggregationsList = new 
ArrayList<>(buckets.size()); - for (Bucket bucket : buckets) { + for (InternalBucket bucket : buckets) { if (reduced == null) { - reduced = new Bucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); + reduced = new InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); } else { reduced.docCount += bucket.docCount; } @@ -156,13 +156,13 @@ public class InternalFilters extends InternalMultiBucketAggregation buckets; - private Map bucketMap; + private List buckets; + private Map bucketMap; private boolean keyed; public InternalFilters() {} // for serialization - public InternalFilters(String name, List buckets, boolean keyed, List pipelineAggregators, Map metaData) { + public InternalFilters(String name, List buckets, boolean keyed, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); this.buckets = buckets; this.keyed = keyed; @@ -174,25 +174,25 @@ public class InternalFilters extends InternalMultiBucketAggregation buckets) { + public InternalFilters create(List buckets) { return new InternalFilters(this.name, buckets, this.keyed, this.pipelineAggregators(), this.metaData); } @Override - public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, aggregations, prototype.keyed); + public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { + return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed); } @Override - public List getBuckets() { + public List getBuckets() { return buckets; } @Override - public Bucket getBucketByKey(String key) { + public InternalBucket getBucketByKey(String key) { if (bucketMap == null) { bucketMap = new HashMap<>(buckets.size()); - for (Bucket bucket : buckets) { + for (InternalBucket bucket : buckets) { bucketMap.put(bucket.getKey(), bucket); } } @@ -201,26 +201,26 @@ public class InternalFilters extends InternalMultiBucketAggregation aggregations, ReduceContext reduceContext) { - List> bucketsList = null; + List> bucketsList = null; for (InternalAggregation aggregation : aggregations) { InternalFilters filters = (InternalFilters) aggregation; if (bucketsList == null) { bucketsList = new ArrayList<>(filters.buckets.size()); - for (Bucket bucket : filters.buckets) { - List sameRangeList = new ArrayList<>(aggregations.size()); + for (InternalBucket bucket : filters.buckets) { + List sameRangeList = new ArrayList<>(aggregations.size()); sameRangeList.add(bucket); bucketsList.add(sameRangeList); } } else { int i = 0; - for (Bucket bucket : filters.buckets) { + for (InternalBucket bucket : filters.buckets) { bucketsList.get(i++).add(bucket); } } } - InternalFilters reduced = new InternalFilters(name, new ArrayList(bucketsList.size()), keyed, pipelineAggregators(), getMetaData()); - for (List sameRangeList : bucketsList) { + InternalFilters reduced = new InternalFilters(name, new ArrayList(bucketsList.size()), keyed, pipelineAggregators(), getMetaData()); + for (List sameRangeList : bucketsList) { reduced.buckets.add((sameRangeList.get(0)).reduce(sameRangeList, reduceContext)); } return reduced; @@ -230,9 +230,9 @@ public class InternalFilters extends InternalMultiBucketAggregation buckets = new ArrayList<>(size); + List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - Bucket bucket = new Bucket(keyed); + InternalBucket bucket = new InternalBucket(keyed); bucket.readFrom(in); buckets.add(bucket); } @@ -244,7 +244,7 @@ public class 
InternalFilters extends InternalMultiBucketAggregation 0) { assertThat(docCountDeriv, notNullValue()); @@ -224,7 +224,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Derivative docCountDeriv = bucket.getAggregations().get("deriv"); if (i > 0) { assertThat(docCountDeriv, notNullValue()); @@ -267,7 +267,7 @@ public class DerivativeIT extends ESIntegTestCase { // overwritten for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long expectedSum = valueCounts[i] * (i * interval); @@ -312,7 +312,7 @@ public class DerivativeIT extends ESIntegTestCase { // overwritten for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long expectedSum = valueCounts[i] * (i * interval); @@ -366,7 +366,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (i > 0) { assertThat(docCountDeriv, notNullValue()); @@ -395,7 +395,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -425,7 +425,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty_rnd.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty_rnd[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -454,7 +454,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i + ": ", bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i + ": ", bucket, i, valueCounts_empty[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -484,7 +484,7 @@ public class DerivativeIT extends ESIntegTestCase { double lastSumValue = Double.NaN; 
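// Reviewer note: the mechanical rename also swept string literals in the assertion messages
// here ("Bucket " became "InternalBucket ") and an unrelated comment in S3Repository below;
// that is cosmetic only, test behavior is unchanged.
//
// For reference, the pattern these loops assert is the first-derivative pipeline over
// adjacent histogram buckets; with unit x-spacing it reduces to (a sketch, not patch code):
//
//     double deriv = thisSumValue - lastSumValue; // defined only once a previous bucket exists
//     lastSumValue = thisSumValue;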
for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { @@ -526,7 +526,7 @@ public class DerivativeIT extends ESIntegTestCase { double lastSumValue = Double.NaN; for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { @@ -565,7 +565,7 @@ public class DerivativeIT extends ESIntegTestCase { double lastSumValue = Double.NaN; for (int i = 0; i < valueCounts_empty_rnd.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 760968b0bf3..612f8a9eea7 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -85,7 +85,7 @@ public class S3Repository extends BlobStoreRepository { String region = repositorySettings.settings().get("region", settings.get(REPOSITORY_S3.REGION)); if (region == null) { - // Bucket setting is not set - use global region setting + // InternalBucket setting is not set - use global region setting String regionSetting = settings.get(CLOUD_AWS.REGION); if (regionSetting != null) { regionSetting = regionSetting.toLowerCase(Locale.ENGLISH); From 5cfefe08b0f512c2e95bd3b8b03459ee7970661b Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 18 Dec 2015 16:45:52 -0500 Subject: [PATCH 148/322] Fix collector's class name on response output --- .../org/elasticsearch/search/profile/CollectorResult.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java index 4949c6388d2..8da14d23d96 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java @@ -123,8 +123,8 @@ public class CollectorResult implements ToXContent, Writeable { @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder = builder.startObject() - .field(NAME.getPreferredName(), toString()) - .field(REASON.getPreferredName(), reason) + .field(NAME.getPreferredName(), getName()) + .field(REASON.getPreferredName(), getReason()) .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0))); if (!children.isEmpty()) { From af11707da08b70c8ef90ff513b5e66c457c1c985 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 
14:36:36 -0800 Subject: [PATCH 149/322] Remove unused method impl in AntTask and make abstract --- .../main/groovy/org/elasticsearch/gradle/AntTask.groovy | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 80541b43f42..1df6306400b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -35,7 +35,7 @@ import java.nio.charset.Charset * * Logging for the task is customizable for subclasses by overriding makeLogger. */ -public class AntTask extends DefaultTask { +public abstract class AntTask extends DefaultTask { /** * A buffer that will contain the output of the ant code run, @@ -76,12 +76,7 @@ public class AntTask extends DefaultTask { } /** Runs the doAnt closure. This can be overridden by subclasses instead of having to set a closure. */ - protected void runAnt(AntBuilder ant) { - if (doAnt == null) { - throw new GradleException("Missing doAnt for ${name}") - } - doAnt(ant) - } + protected abstract void runAnt(AntBuilder ant); /** Create the logger the ant runner will use, with the given stream for error/output. */ protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { From 7584810ff4d32e1f7c4bd2cda6a0a7187a0258d3 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sat, 19 Dec 2015 01:26:58 +0200 Subject: [PATCH 150/322] * Make plugin hadoop2-only Polish MiniDFS cluster to be Hadoop2 (instead of Hadoop1) based --- plugins/repository-hdfs/build.gradle | 163 +++++++----------- .../plugin/hadoop/hdfs/HdfsPlugin.java | 28 +-- .../plugin/hadoop/hdfs/Utils.java | 9 +- .../hdfs/{FsCallback.java => FcCallback.java} | 6 +- ...emFactory.java => FileContextFactory.java} | 6 +- .../repositories/hdfs/HdfsBlobContainer.java | 108 ++++++++---- .../repositories/hdfs/HdfsBlobStore.java | 26 +-- .../repositories/hdfs/HdfsRepository.java | 56 +++--- .../repositories/hdfs/SecurityUtils.java | 10 +- .../plugin-metadata/plugin-security.policy | 32 ++-- .../hadoop/hdfs/HdfsRepositoryRestIT.java | 11 +- .../plugin/hadoop/hdfs/HdfsTestPlugin.java | 3 +- .../plugin/hadoop/hdfs/HdfsTests.java | 21 ++- .../plugin/hadoop/hdfs/MiniHDFS.java | 114 ++++++++++++ .../plugin/hadoop/hdfs/MiniHDFSCluster.java | 48 ------ .../plugin/hadoop/hdfs/UtilsTests.java | 3 +- .../test/hdfs_repository/20_repository.yaml | 25 +++ 17 files changed, 377 insertions(+), 292 deletions(-) rename plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/{FsCallback.java => FcCallback.java} (88%) rename plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/{FileSystemFactory.java => FileContextFactory.java} (87%) create mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java delete mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index ca444768590..cb7d0e4628f 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -24,51 +24,23 @@ esplugin { classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin' } -configurations { - hadoop1 - hadoop2 -} - versions << [ - 'hadoop1': '1.2.1', 'hadoop2': '2.7.1' 
] +configurations { + hadoop2 +} + dependencies { - provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - provided "org.apache.hadoop:hadoop-core:${versions.hadoop1}" - - // use Hadoop1 to compile and test things (a subset of Hadoop2) - testCompile "org.apache.hadoop:hadoop-core:${versions.hadoop1}" - testCompile "org.apache.hadoop:hadoop-test:${versions.hadoop1}" - // Hadoop dependencies - testCompile "commons-configuration:commons-configuration:1.6" - testCompile "commons-lang:commons-lang:${versions.commonslang}" - testCompile "commons-collections:commons-collections:3.2.2" - testCompile "commons-net:commons-net:1.4.1" - testCompile "org.mortbay.jetty:jetty:6.1.26" - testCompile "org.mortbay.jetty:jetty-util:6.1.26" - testCompile "org.mortbay.jetty:servlet-api:2.5-20081211" - testCompile "com.sun.jersey:jersey-core:1.8" - - - hadoop1("org.apache.hadoop:hadoop-core:${versions.hadoop1}") { - exclude module: "commons-cli" - exclude group: "com.sun.jersey" - exclude group: "org.mortbay.jetty" - exclude group: "tomcat" - exclude module: "commons-el" - exclude module: "hsqldb" - exclude group: "org.eclipse.jdt" - exclude module: "commons-beanutils" - exclude module: "commons-beanutils-core" - exclude module: "junit" - // provided by ES itself - exclude group: "log4j" - } - - hadoop2("org.apache.hadoop:hadoop-client:${versions.hadoop2}") { - exclude module: "commons-cli" + hadoop2 ("org.apache.hadoop:hadoop-client:${versions.hadoop2}") { + exclude module: "hadoop-yarn-common" + exclude module: "hadoop-mapreduce-client-app" + exclude module: "hadoop-mapreduce-client-core" + exclude module: "hadoop-mapreduce-client-jobclient" + exclude module: "hadoop-yarn-api" + + exclude group: "commons-cli" exclude group: "com.sun.jersey" exclude group: "com.sun.jersey.contribs" exclude group: "com.sun.jersey.jersey-test-framework" @@ -82,37 +54,57 @@ dependencies { exclude module: "commons-beanutils-core" exclude module: "javax.servlet" exclude module: "junit" + exclude module: "netty" // provided by ES itself exclude group: "log4j" } - hadoop2("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}") { + hadoop2 ("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}") { + // prevent jar hell + exclude module: "hadoop-yarn-common" + exclude module: "commons-cli" + exclude module: "netty" exclude module: "guava" exclude module: "junit" // provided by ES itself exclude group: "log4j" } -} + + provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}" + provided configurations.hadoop2 + + testCompile ("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}:tests") { + exclude module: "commons-cli" + exclude module: "netty" + } + + testCompile ("org.apache.hadoop:hadoop-common:${versions.hadoop2}:tests") { + exclude module: "commons-cli" + } +} configurations.all { + // used due to _transitive_ configuration resolutionStrategy { + force "commons-cli:commons-cli:1.3.1" + force "io.netty:netty:3.10.5.Final" force "commons-codec:commons-codec:${versions.commonscodec}" force "commons-logging:commons-logging:${versions.commonslogging}" force "commons-lang:commons-lang:2.6" force "commons-httpclient:commons-httpclient:3.0.1" - force "org.codehaus.jackson:jackson-core-asl:1.8.8" - force "org.codehaus.jackson:jackson-mapper-asl:1.8.8" + force "org.codehaus.jackson:jackson-core-asl:1.9.13" + force "org.codehaus.jackson:jackson-mapper-asl:1.9.13" force "com.google.code.findbugs:jsr305:3.0.0" force "com.google.guava:guava:16.0.1" - force "org.slf4j:slf4j-api:1.7.10" - force "org.slf4j:slf4j-log4j12:1.7.10" + 
force "org.slf4j:slf4j-api:${versions.slf4j}" + force "org.slf4j:slf4j-log4j12:${versions.slf4j}" + force "junit:junit:${versions.junit}" + force "org.apache.httpcomponents:httpclient:4.3.6" + force "log4j:log4j:${versions.log4j}" } } - - dependencyLicenses { - mapping from: /hadoop-core.*/, to: 'hadoop-1' - mapping from: /hadoop-.*/, to: 'hadoop-2' + mapping from: /hadoop-.*/, to: 'hadoop' } compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' @@ -145,62 +137,31 @@ bundlePlugin { } } - -task distZipHadoop1(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - include "*" - include "internal-libs/**" - } - - description = "Builds archive (with Hadoop1 dependencies) suitable for download page." - classifier = "hadoop1" - - into ("hadoop-libs") { - from configurations.hadoop1.allArtifacts.files - from configurations.hadoop1 - } +task miniHdfsStart(type: JavaExec) { + classpath = sourceSets.test.compileClasspath + sourceSets.test.output + main = "org.elasticsearch.plugin.hadoop.hdfs.MiniHDFS" + errorOutput = new FileOutputStream("build/minihdfs.err") + standardOutput = new FileOutputStream("build/minihdfs.out") + //ext.hdfsPid = (main as Class).getPid } -task distZipHadoop2(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - include "*" - include "internal-libs/**" - } - - description = "Builds archive (with Hadoop2/YARN dependencies) suitable for download page." - classifier = "hadoop2" +//task miniHdfsStop(type: org.elasticsearch.gradle.LoggedExec) { +// onlyIf { hdfsPid > -1 } +// if (Os.isFamily(Os.FAMILY_WINDOWS)) { +// executable 'Taskkill' +// args '/PID', hdfsCluster.pid, '/F' +// } else { +// executable 'kill' +// args '-9', hdfsCluster.pid +// } +//} - into ("hadoop-libs") { - from configurations.hadoop2.allArtifacts.files - from configurations.hadoop2 - } -} - -task distZipNoHadoop(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - exclude "hadoop-libs/**" - } - - from sourceSets.main.output.resourcesDir - - description = "Builds archive (without any Hadoop dependencies) suitable for download page." - classifier = "lite" -} +//integTest.dependsOn(miniHdfsStart) +//integTest.finalizedBy(miniHdfsStop) +thirdPartyAudit.enabled = false artifacts { archives bundlePlugin 'default' bundlePlugin - archives distZipHadoop1 - archives distZipHadoop2 - archives distZipNoHadoop -} - -integTest { - cluster { - plugin(pluginProperties.extension.name, zipTree(distZipHadoop2.archivePath)) - } -} - -// classes are missing, e.g. 
org.mockito.Mockito -thirdPartyAudit.missingClasses = true +} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 242dc2f3269..9b65f7bec2f 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -18,16 +18,6 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.Repository; - import java.io.IOException; import java.lang.reflect.Method; import java.net.URI; @@ -41,13 +31,23 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesModule; +import org.elasticsearch.repositories.Repository; + // // Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode. // This poses two problems: // - Hadoop itself comes with tons of jars, many providing the same classes across packages. In particular Hadoop 2 provides package annotations in the same // package across jars which trips JarHell. Thus, to allow Hadoop jars to load, the plugin uses a dedicated CL which picks them up from the hadoop-libs folder. // - The issue though with using a different CL is that it picks up the jars from a different location / codeBase and thus it does not fall under the plugin -// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. +// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. // There are different approaches here: // - implement a custom classloader that loads the jars but 'lies' about the codesource. It is doable but since URLClassLoader is locked down, one would // would have to implement the whole jar opening and loading from it. Not impossible but still fairly low-level. @@ -64,7 +64,7 @@ import java.util.Locale; // - package plugin.hadoop.hdfs is part of the plugin // - all the other packages are assumed to be in the nested Hadoop CL. 
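In isolation, the nested-classloader idea laid out in the comment above reads roughly like the sketch below (the jar path and class name are placeholder assumptions; the plugin's actual jar discovery, shown in the patch that follows, is more involved):

    // Sketch: load the bundled Hadoop jars through a child classloader and resolve
    // the repository class reflectively, so the plugin class itself never links
    // against Hadoop types. Assumes java.net.URL / java.net.URLClassLoader imports.
    List<URL> cp = new ArrayList<>();
    cp.add(new URL("file:///path/to/plugin/hadoop-libs/hadoop-client.jar")); // placeholder path
    ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), HdfsPlugin.class.getClassLoader());
    Class<?> repository = hadoopCL.loadClass("org.elasticsearch.repositories.hdfs.HdfsRepository");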
-// Code +// Code public class HdfsPlugin extends Plugin { @Override @@ -81,7 +81,7 @@ public class HdfsPlugin extends Plugin { public void onModule(RepositoriesModule repositoriesModule) { String baseLib = Utils.detectLibFolder(); List cp = getHadoopClassLoaderPath(baseLib); - + ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader()); Class repository = null; @@ -170,4 +170,4 @@ public class HdfsPlugin extends Plugin { } } } -} +} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java index 89fa3f5910f..ad915adeb2a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plugin.hadoop.hdfs; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,8 +16,7 @@ package org.elasticsearch.plugin.hadoop.hdfs; * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.SpecialPermission; +package org.elasticsearch.plugin.hadoop.hdfs; import java.net.URL; import java.security.AccessControlContext; @@ -28,6 +25,8 @@ import java.security.DomainCombiner; import java.security.PrivilegedAction; import java.security.ProtectionDomain; +import org.elasticsearch.SpecialPermission; + public abstract class Utils { protected static AccessControlContext hadoopACC() { @@ -100,4 +99,4 @@ public abstract class Utils { return base; } -} +} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java similarity index 88% rename from plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java rename to plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java index 7b9ec8331d9..c430d4f6aed 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java @@ -19,11 +19,11 @@ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileContext; import java.io.IOException; -interface FsCallback { +interface FcCallback { - V doInHdfs(FileSystem fs) throws IOException; + V doInHdfs(FileContext fc) throws IOException; } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java similarity index 87% rename from plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java rename to plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java index b0b5fb10c33..0080b7fe239 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java @@ -18,11 +18,11 @@ */ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.fs.FileSystem; 
+import org.apache.hadoop.fs.FileContext; import java.io.IOException; -interface FileSystemFactory { +interface FileContextFactory { - FileSystem getFileSystem() throws IOException; + FileContext getFileContext() throws IOException; } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index f71ca7020a8..47024796b03 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -18,24 +18,27 @@ */ package org.elasticsearch.repositories.hdfs; +import org.apache.hadoop.fs.CreateFlag; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Options.CreateOpts; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; +import org.apache.hadoop.fs.Syncable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; +import java.util.EnumSet; import java.util.LinkedHashMap; -import java.util.Locale; import java.util.Map; public class HdfsBlobContainer extends AbstractBlobContainer { @@ -52,10 +55,10 @@ public class HdfsBlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try { - return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + return SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.exists(new Path(path, blobName)); + public Boolean doInHdfs(FileContext fc) throws IOException { + return fc.util().exists(new Path(path, blobName)); } }); } catch (Exception e) { @@ -65,46 +68,77 @@ public class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.delete(new Path(path, blobName), true); + public Boolean doInHdfs(FileContext fc) throws IOException { + return fc.delete(new Path(path, blobName), true); } }); } @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - boolean rename = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); + public Void doInHdfs(FileContext fc) throws IOException { + // _try_ to hsync the file before appending + // since append is optional this is a best effort + Path source = new Path(path, sourceBlobName); + + 
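Every filesystem touch in this blob container is funneled through SecurityUtils.execute with an FcCallback, as in blobExists and deleteBlob above. Condensed to its core, and under the assumption that callbacks only ever throw IOException, the pattern is roughly:

    // Simplified sketch; the real SecurityUtils also bridges the plugin's
    // AccessControlContext (AccBridge.acc()) into doPrivileged.
    static <V> V execute(final FileContext fc, final FcCallback<V> callback) throws IOException {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<V>() {
                @Override
                public V run() throws IOException {
                    return callback.doInHdfs(fc);
                }
            });
        } catch (PrivilegedActionException pae) {
            throw (IOException) pae.getException(); // assumes run() only throws IOException
        }
    }

The move() body continues below inside exactly such a callback.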
// try-with-resources is nice, but since the append is optional it's hard to figure out + // what worked and what didn't. + // it's okay not to be able to append to the file, but not okay if hsync fails + // classic try / catch to the rescue + + FSDataOutputStream stream = null; + try { + stream = fc.create(source, EnumSet.of(CreateFlag.APPEND, CreateFlag.SYNC_BLOCK), CreateOpts.donotCreateParent()); + } catch (IOException ex) { + // append is optional, ignore + } + if (stream != null) { + try (OutputStream s = stream) { + if (s instanceof Syncable) { + ((Syncable) s).hsync(); + } + } + } + + // finally rename + fc.rename(source, new Path(path, targetBlobName)); + return null; } }); - - if (!rename) { - throw new IOException(String.format(Locale.ROOT, "can not move blob from [%s] to [%s]", sourceBlobName, targetBlobName)); - } } @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + return SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public InputStream doInHdfs(FileSystem fs) throws IOException { - return fs.open(new Path(path, blobName), blobStore.bufferSizeInBytes()); + public InputStream doInHdfs(FileContext fc) throws IOException { + return fc.open(new Path(path, blobName), blobStore.bufferSizeInBytes()); } }); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - try (OutputStream stream = createOutput(blobName)) { - Streams.copy(inputStream, stream); + public Void doInHdfs(FileContext fc) throws IOException { + // don't use Streams.copy here so that hsync can be called manually + // note that the inputstream is NOT closed here for two reasons: + // 1. it is closed already by ES after executing this method + // 2. 
closing the stream twice causes Hadoop to issue WARNING messages which are basically noise + // see https://issues.apache.org/jira/browse/HDFS-8099 + try (FSDataOutputStream stream = createOutput(fc, blobName)) { + int bytesRead; + byte[] buffer = new byte[blobStore.bufferSizeInBytes()]; + while ((bytesRead = inputStream.read(buffer)) != -1) { + stream.write(buffer, 0, bytesRead); + } + stream.hsync(); } return null; } @@ -113,34 +147,34 @@ public class HdfsBlobContainer extends AbstractBlobContainer { @Override public void writeBlob(String blobName, BytesReference bytes) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - try (OutputStream stream = createOutput(blobName)) { + public Void doInHdfs(FileContext fc) throws IOException { + try (FSDataOutputStream stream = createOutput(fc, blobName)) { bytes.writeTo(stream); + stream.hsync(); } return null; } }); } - private OutputStream createOutput(String blobName) throws IOException { - Path file = new Path(path, blobName); - // FSDataOutputStream does buffering internally - return blobStore.fileSystemFactory().getFileSystem().create(file, true, blobStore.bufferSizeInBytes()); + private FSDataOutputStream createOutput(FileContext fc, String blobName) throws IOException { + return fc.create(new Path(path, blobName), EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK), + CreateOpts.bufferSize(blobStore.bufferSizeInBytes()), CreateOpts.createParent()); } @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public FileStatus[] doInHdfs(FileSystem fs) throws IOException { - return fs.listStatus(path, new PathFilter() { + public FileStatus[] doInHdfs(FileContext fc) throws IOException { + return (!fc.util().exists(path) ? null : fc.util().listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { return path.getName().startsWith(blobNamePrefix); } - }); + })); } }); if (files == null || files.length == 0) { @@ -155,10 +189,10 @@ public class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback() { + FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override - public FileStatus[] doInHdfs(FileSystem fs) throws IOException { - return fs.listStatus(path); + public FileStatus[] doInHdfs(FileContext fc) throws IOException { + return (!fc.util().exists(path) ? 
null : fc.util().listStatus(path)); } }); if (files == null || files.length == 0) { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index b75485fa7fe..9c6dac7b68a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobContainer; @@ -35,14 +35,14 @@ import java.util.concurrent.Executor; public class HdfsBlobStore extends AbstractComponent implements BlobStore { - private final FileSystemFactory ffs; + private final FileContextFactory fcf; private final Path rootHdfsPath; private final ThreadPool threadPool; private final int bufferSizeInBytes; - public HdfsBlobStore(Settings settings, FileSystemFactory ffs, Path path, ThreadPool threadPool) throws IOException { + public HdfsBlobStore(Settings settings, FileContextFactory ffs, Path path, ThreadPool threadPool) throws IOException { super(settings); - this.ffs = ffs; + this.fcf = ffs; this.rootHdfsPath = path; this.threadPool = threadPool; @@ -52,11 +52,11 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { } private void mkdirs(Path path) throws IOException { - SecurityUtils.execute(ffs, new FsCallback() { + SecurityUtils.execute(fcf, new FcCallback() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - if (!fs.exists(path)) { - fs.mkdirs(path); + public Void doInHdfs(FileContext fc) throws IOException { + if (fc.util().exists(path)) { + fc.mkdir(path, null, true); } return null; } @@ -68,8 +68,8 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { return rootHdfsPath.toUri().toString(); } - public FileSystemFactory fileSystemFactory() { - return ffs; + public FileContextFactory fileContextFactory() { + return fcf; } public Path path() { @@ -91,10 +91,10 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { @Override public void delete(BlobPath path) throws IOException { - SecurityUtils.execute(ffs, new FsCallback() { + SecurityUtils.execute(fcf, new FcCallback() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - fs.delete(translateToHdfsPath(path), true); + public Void doInHdfs(FileContext fc) throws IOException { + fc.delete(translateToHdfsPath(path), true); return null; } }); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index b5b5b4d0f4a..d5208665c6f 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -19,9 +19,9 @@ package org.elasticsearch.repositories.hdfs; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.AbstractFileSystem; +import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; import 
org.apache.hadoop.security.UserGroupInformation; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; @@ -51,7 +51,7 @@ import java.util.Locale; import java.util.Map; import java.util.Map.Entry; -public class HdfsRepository extends BlobStoreRepository implements FileSystemFactory { +public class HdfsRepository extends BlobStoreRepository implements FileContextFactory { public final static String TYPE = "hdfs"; @@ -60,7 +60,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac private final ByteSizeValue chunkSize; private final boolean compress; private final RepositorySettings repositorySettings; - private FileSystem fs; + private FileContext fc; @Inject public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException { @@ -74,16 +74,16 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac } // get configuration - fs = getFileSystem(); - Path hdfsPath = SecurityUtils.execute(fs, new FsCallback() { + fc = getFileContext(); + Path hdfsPath = SecurityUtils.execute(fc, new FcCallback() { @Override - public Path doInHdfs(FileSystem fs) throws IOException { - return fs.makeQualified(new Path(path)); + public Path doInHdfs(FileContext fc) throws IOException { + return fc.makeQualified(new Path(path)); } }); this.basePath = BlobPath.cleanPath(); - logger.debug("Using file-system [{}] for URI [{}], path [{}]", fs, fs.getUri(), hdfsPath); + logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool); this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null)); this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false)); @@ -91,7 +91,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac // as the FileSystem is long-lived and might go away, make sure to check it before it's being used. 
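The accessor below re-checks that cached handle on every use. Stripped of the security and classloader plumbing, the check-and-reinitialize idiom is roughly the following sketch (names here are illustrative, not the patch's):

    // Sketch: probe the cached FileContext cheaply, drop it when HDFS reports
    // the filesystem closed, and rebuild it lazily.
    private FileContext cached;

    FileContext liveContext() throws IOException {
        if (cached != null) {
            try {
                cached.util().exists(cached.getWorkingDirectory()); // cheap liveness probe
            } catch (IOException ex) {
                if (ex.getMessage() != null && ex.getMessage().contains("Filesystem closed")) {
                    cached = null; // stale handle, rebuild below
                } else {
                    throw ex;
                }
            }
        }
        if (cached == null) {
            cached = rebuild(); // stands in for initFileContext(...) in the patch
        }
        return cached;
    }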
@Override - public FileSystem getFileSystem() throws IOException { + public FileContext getFileContext() throws IOException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { // unprivileged code such as scripts do not have SpecialPermission @@ -99,10 +99,10 @@ } try { - return AccessController.doPrivileged(new PrivilegedExceptionAction() { + return AccessController.doPrivileged(new PrivilegedExceptionAction() { @Override - public FileSystem run() throws IOException { - return doGetFileSystem(); + public FileContext run() throws IOException { + return doGetFileContext(); } }, SecurityUtils.AccBridge.acc()); } catch (PrivilegedActionException pae) { @@ -120,37 +120,37 @@ } } - private FileSystem doGetFileSystem() throws IOException { + private FileContext doGetFileContext() throws IOException { // check if the fs is still alive // make a cheap call that triggers little to no security checks - if (fs != null) { + if (fc != null) { try { - fs.isFile(fs.getWorkingDirectory()); + fc.util().exists(fc.getWorkingDirectory()); } catch (IOException ex) { if (ex.getMessage().contains("Filesystem closed")) { - fs = null; + fc = null; } else { throw ex; } } } - if (fs == null) { + if (fc == null) { Thread th = Thread.currentThread(); ClassLoader oldCL = th.getContextClassLoader(); try { th.setContextClassLoader(getClass().getClassLoader()); - return initFileSystem(repositorySettings); + return initFileContext(repositorySettings); } catch (IOException ex) { throw ex; } finally { th.setContextClassLoader(oldCL); } } - return fs; + return fc; } - private FileSystem initFileSystem(RepositorySettings repositorySettings) throws IOException { + private FileContext initFileContext(RepositorySettings repositorySettings) throws IOException { Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", settings.getAsBoolean("load_defaults", true))); cfg.setClassLoader(this.getClass().getClassLoader()); @@ -175,15 +175,16 @@ } String uri = repositorySettings.settings().get("uri", settings.get("uri")); - URI actualUri = (uri != null ? URI.create(uri) : FileSystem.getDefaultUri(cfg)); - String user = repositorySettings.settings().get("user", settings.get("user")); + URI actualUri = (uri != null ? URI.create(uri) : null); try { // disable FS cache String disableFsCache = String.format(Locale.ROOT, "fs.%s.impl.disable.cache", actualUri.getScheme()); cfg.setBoolean(disableFsCache, true); + // create the AFS manually, since going through FileContext relies on Subject.doAs for no reason at all + AbstractFileSystem fs = AbstractFileSystem.get(actualUri, cfg); + return FileContext.getFileContext(fs, cfg); } catch (Exception ex) { throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create Hdfs file-system for uri [%s]", actualUri), ex); } } @@ -253,7 +254,8 @@ protected void doClose() throws ElasticsearchException { super.doClose(); - IOUtils.closeStream(fs); - fs = null; + // TODO: FileContext does not support any close - is there really no way + // to handle it? 
+ fc = null; } -} +} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index 550224082d9..545e1fc62b5 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.plugin.hadoop.hdfs.Utils; @@ -38,11 +38,11 @@ class SecurityUtils { } } - static V execute(FileSystemFactory ffs, FsCallback callback) throws IOException { - return execute(ffs.getFileSystem(), callback); + static V execute(FileContextFactory fcf, FcCallback callback) throws IOException { + return execute(fcf.getFileContext(), callback); } - static V execute(FileSystem fs, FsCallback callback) throws IOException { + static V execute(FileContext fc, FcCallback callback) throws IOException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { // unprivileged code such as scripts do not have SpecialPermission @@ -53,7 +53,7 @@ class SecurityUtils { return AccessController.doPrivileged(new PrivilegedExceptionAction() { @Override public V run() throws IOException { - return callback.doInHdfs(fs); + return callback.doInHdfs(fc); } }, AccBridge.acc()); } catch (PrivilegedActionException pae) { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index d26acd121e4..f80e103201b 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -28,40 +28,34 @@ grant { permission java.lang.RuntimePermission "setContextClassLoader"; // - // Hadoop 1 + // Hadoop 2 // - - // UserGroupInformation (UGI) + + // UserGroupInformation (UGI) Metrics + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + // Shell initialization - reading system props + permission java.util.PropertyPermission "*", "read,write"; + // UGI triggers JAAS permission javax.security.auth.AuthPermission "getSubject"; // JAAS libraries are not loaded with the proper context in Hadoop, hence why the permission is needed here permission java.lang.RuntimePermission "loadLibrary.jaas_nt"; - // which triggers the use of the Kerberos library - permission java.lang.RuntimePermission "accessClassInPackage.sun.security.krb5"; - // plus LoginContext permission javax.security.auth.AuthPermission "modifyPrincipals"; permission javax.security.auth.AuthPermission "modifyPublicCredentials"; permission javax.security.auth.AuthPermission "modifyPrivateCredentials"; - - // - // Hadoop 2 - // - // UGI (Ugi Metrics) - permission java.lang.RuntimePermission "accessDeclaredMembers"; - - // Shell initialization - reading system props - permission java.util.PropertyPermission "*", "read,write"; + //permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials \"*\"", "read"; - permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials \"*\"", 
"read"; + permission javax.security.auth.AuthPermission "doAs"; - // HftpFileSystem (all present FS are loaded and initialized at startup ...) - permission java.lang.RuntimePermission "setFactory"; -}; \ No newline at end of file + // DFSClient init (metrics again) + permission java.lang.RuntimePermission "shutdownHooks"; + }; \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java index 065c06208ef..30d1aafcaba 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plugin.hadoop.hdfs; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,17 +16,20 @@ package org.elasticsearch.plugin.hadoop.hdfs; * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.plugin.hadoop.hdfs; + +import java.io.IOException; +import java.util.Collection; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; -import java.io.IOException; -import java.util.Collection; - public class HdfsRepositoryRestIT extends ESRestTestCase { @Override diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java index e980b6a26e3..868e39813e3 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plugin.hadoop.hdfs; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,6 +16,7 @@ package org.elasticsearch.plugin.hadoop.hdfs; * specific language governing permissions and limitations * under the License. 
*/ +package org.elasticsearch.plugin.hadoop.hdfs; import java.net.URL; import java.util.Collections; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index b4b530e916b..6ea9cd16544 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.ESIntegTestCase.ThirdParty; import org.elasticsearch.test.store.MockFSDirectoryService; import org.junit.After; import org.junit.Before; @@ -45,7 +44,11 @@ import static org.hamcrest.Matchers.greaterThan; /** * You must specify {@code -Dtests.thirdparty=true} */ -@ThirdParty +// Make sure to start the MiniHDFS cluster first; +// otherwise, one will get a weird PrivateCredentialPermission exception +// caused by the HDFS fallback code (which doesn't do much anyway) + +// @ThirdParty @ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0) public class HdfsTests extends ESIntegTestCase { @@ -75,10 +78,12 @@ public class HdfsTests extends ESIntegTestCase { } private String path; + private int port; @Before public final void wipeBefore() throws Exception { wipeRepositories(); + port = MiniHDFS.getPort(); path = "build/data/repo-" + randomInt(); } @@ -94,9 +99,9 @@ public class HdfsTests extends ESIntegTestCase { PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - //.put("uri", "hdfs://127.0.0.1:51227") + .put("uri", "hdfs://127.0.0.1:" + port) .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) - .put("uri", "es-hdfs://./build/") + // .put("uri", "es-hdfs:///") .put("path", path) .put("conf", "additional-cfg.xml, conf-2.xml") .put("chunk_size", randomIntBetween(100, 1000) + "k") @@ -178,9 +183,9 @@ public class HdfsTests extends ESIntegTestCase { PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - // .put("uri", "hdfs://127.0.0.1:51227/") - .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) - .put("uri", "es-hdfs:///") + .put("uri", "hdfs://127.0.0.1:" + port) + // .put("uri", "es-hdfs:///") + .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) .put("path", path + "a@b$c#11:22") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean())) @@ -215,4 +220,4 @@ public class HdfsTests extends ESIntegTestCase { private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} +} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java new file mode 100644 index 00000000000..a492ce7ae6d --- /dev/null +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.hadoop.hdfs; + +import com.carrotsearch.randomizedtesting.RandomizedTest; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.monitor.jvm.JvmInfo; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Locale; + +public class MiniHDFS { + + private static volatile MiniDFSCluster dfs; + + private static String PORT_FILE_NAME = "minihdfs.port"; + private static String PID_FILE_NAME = "minihdfs.pid"; + + public static void main(String[] args) throws Exception { + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + MiniHDFS.stop(); + } + }); + start(); + } + + public static int start() throws IOException { + if (dfs != null) { + return -1; + } + + Path basePath = getBasePath(); + Path portPath = basePath.resolve(PORT_FILE_NAME); + Path pidPath = basePath.resolve(PID_FILE_NAME); + + if (Files.exists(basePath)) { + RandomizedTest.rmDir(basePath); + } + + Configuration cfg = new Configuration(); + cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, getBasePath().toAbsolutePath().toString()); + // lower default permission + cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); + dfs = new MiniDFSCluster.Builder(cfg).build(); + int port = dfs.getNameNodePort(); + + // write port + Files.write(portPath, Integer.toString(port).getBytes(StandardCharsets.UTF_8)); + // write pid + Files.write(pidPath, Long.toString(JvmInfo.jvmInfo().getPid()).getBytes(StandardCharsets.UTF_8)); + + System.out.printf(Locale.ROOT, "Started HDFS at %s\n", dfs.getURI()); + System.out.printf(Locale.ROOT, "Port information available at %s\n", portPath.toRealPath()); + System.out.printf(Locale.ROOT, "PID information available at %s\n", pidPath.toRealPath()); + return port; + } + + private static Path getBasePath() { + Path tmpFolder = PathUtils.get(System.getProperty("java.io.tmpdir")); + // "test.build.data" + String baseFolder = System.getProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, "es-test/build/test/data"); + return tmpFolder.resolve(baseFolder); + } + + public static int getPort() throws IOException { + Path portPath = getBasePath().resolve(PORT_FILE_NAME); + if (Files.exists(portPath)) { + return Integer.parseInt(new String(Files.readAllBytes(portPath), StandardCharsets.UTF_8)); + } + throw new IllegalStateException(String.format(Locale.ROOT, "Cannot find Mini DFS port file at %s ; was '%s' started?", portPath.toAbsolutePath(), MiniHDFS.class)); + } + + public static long getPid() throws Exception { + Path pidPath = getBasePath().resolve(PID_FILE_NAME); + if (Files.exists(pidPath)) { + 
return Long.parseLong(new String(Files.readAllBytes(pidPath), StandardCharsets.UTF_8)); + } + throw new IllegalStateException(String.format(Locale.ROOT, "Cannot find Mini DFS pid file at %s ; was '%s' started?", pidPath.toAbsolutePath(), MiniHDFS.class)); + } + + + public static void stop() { + if (dfs != null) { + dfs.shutdown(true); + dfs = null; + } + } +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java deleted file mode 100644 index 0d700615a1a..00000000000 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.hadoop.hdfs; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.server.datanode.DataNode; -import org.elasticsearch.common.SuppressForbidden; - -import java.io.File; - -public class MiniHDFSCluster { - - @SuppressForbidden(reason = "Hadoop is messy") - public static void main(String[] args) throws Exception { - FileUtil.fullyDelete(new File(System.getProperty("test.build.data", "build/test/data"), "dfs/")); - // MiniHadoopClusterManager.main(new String[] { "-nomr" }); - Configuration cfg = new Configuration(); - cfg.set(DataNode.DATA_DIR_PERMISSION_KEY, "666"); - cfg.set("dfs.replication", "0"); - MiniDFSCluster dfsCluster = new MiniDFSCluster(cfg, 1, true, null); - FileSystem fs = dfsCluster.getFileSystem(); - System.out.println(fs.getClass()); - System.out.println(fs.getUri()); - System.out.println(dfsCluster.getHftpFileSystem().getClass()); - - // dfsCluster.shutdown(); - } -} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java index 37aecb04b9b..b0ef392447e 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plugin.hadoop.hdfs; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,6 +16,7 @@ package org.elasticsearch.plugin.hadoop.hdfs; * specific language governing permissions and limitations * under the License. 
*/ +package org.elasticsearch.plugin.hadoop.hdfs; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml new file mode 100644 index 00000000000..f1f5f7a65e0 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -0,0 +1,25 @@ +# Integration tests for HDFS Repository plugin +# +# Check plugin is installed +# +"HDFS Repository Config": + - do: + snapshot.create_repository: + repository: test_repo_hdfs_1 + verify: false + body: + type: hdfs + settings: + # local HDFS implementation + conf.fs.es-hdfs.impl: "org.elasticsearch.repositories.hdfs.TestingFs" + uri: "es-hdfs://./build/" + path: "build/data/repo-hdfs" + + # Get repository + - do: + snapshot.get_repository: + repository: test_repo_hdfs_1 + + - is_true: test_repo_hdfs_1 + - is_true: test_repo_hdfs_1.settings.uri + - match: {test_repo_hdfs_1.settings.path : "build/data/repo-hdfs"} From 91fe99a7f631e943b0fe6972fbac6bc6cc720749 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 16:52:22 -0800 Subject: [PATCH 151/322] Make hdfs plugin not use transitive deps --- plugins/repository-hdfs/build.gradle | 135 ++---------------- .../plugin/hadoop/hdfs/HdfsTests.java | 6 +- .../plugin/hadoop/hdfs/MiniHDFS.java | 114 --------------- 3 files changed, 12 insertions(+), 243 deletions(-) delete mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index cb7d0e4628f..6383a684018 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -28,140 +28,23 @@ versions << [ 'hadoop2': '2.7.1' ] -configurations { - hadoop2 -} - dependencies { - hadoop2 ("org.apache.hadoop:hadoop-client:${versions.hadoop2}") { - exclude module: "hadoop-yarn-common" - exclude module: "hadoop-mapreduce-client-app" - exclude module: "hadoop-mapreduce-client-core" - exclude module: "hadoop-mapreduce-client-jobclient" - exclude module: "hadoop-yarn-api" - - exclude group: "commons-cli" - exclude group: "com.sun.jersey" - exclude group: "com.sun.jersey.contribs" - exclude group: "com.sun.jersey.jersey-test-framework" - exclude module: "guice" - exclude group: "org.mortbay.jetty" - exclude group: "tomcat" - exclude module: "commons-el" - exclude module: "hsqldb" - exclude group: "org.eclipse.jdt" - exclude module: "commons-beanutils" - exclude module: "commons-beanutils-core" - exclude module: "javax.servlet" - exclude module: "junit" - exclude module: "netty" - // provided by ES itself - exclude group: "log4j" - } - - hadoop2 ("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}") { - // prevent jar hell - exclude module: "hadoop-yarn-common" - exclude module: "commons-cli" - exclude module: "netty" - exclude module: "guava" - exclude module: "junit" - // provided by ES itself - exclude group: "log4j" - } - - provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - provided configurations.hadoop2 + compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}" + compile 'com.google.guava:guava:16.0.1' + compile 
'commons-logging:commons-logging:1.1.3' + compile 'commons-collections:commons-collections:3.2.2' + compile 'commons-configuration:commons-configuration:1.6' + compile 'commons-lang:commons-lang:2.6' - testCompile ("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}:tests") { - exclude module: "commons-cli" - exclude module: "netty" - } - - testCompile ("org.apache.hadoop:hadoop-common:${versions.hadoop2}:tests") { - exclude module: "commons-cli" - } } -configurations.all { - // used due to _transitive_ configuration - resolutionStrategy { - force "commons-cli:commons-cli:1.3.1" - force "io.netty:netty:3.10.5.Final" - force "commons-codec:commons-codec:${versions.commonscodec}" - force "commons-logging:commons-logging:${versions.commonslogging}" - force "commons-lang:commons-lang:2.6" - force "commons-httpclient:commons-httpclient:3.0.1" - force "org.codehaus.jackson:jackson-core-asl:1.9.13" - force "org.codehaus.jackson:jackson-mapper-asl:1.9.13" - force "com.google.code.findbugs:jsr305:3.0.0" - force "com.google.guava:guava:16.0.1" - force "org.slf4j:slf4j-api:${versions.slf4j}" - force "org.slf4j:slf4j-log4j12:${versions.slf4j}" - force "junit:junit:${versions.junit}" - force "org.apache.httpcomponents:httpclient:4.3.6" - force "log4j:log4j:${versions.log4j}" - } -} dependencyLicenses { mapping from: /hadoop-.*/, to: 'hadoop' } compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' -// main jar includes just the plugin classes -jar { - include "org/elasticsearch/plugin/hadoop/hdfs/*" -} - -// hadoop jar (which actually depend on Hadoop) -task hadoopLinkedJar(type: Jar, dependsOn:jar) { - appendix "internal" - from sourceSets.main.output.classesDir - // exclude plugin - exclude "org/elasticsearch/plugin/hadoop/hdfs/*" -} - - -bundlePlugin.dependsOn hadoopLinkedJar - -// configure 'bundle' as being w/o Hadoop deps -bundlePlugin { - into ("internal-libs") { - from hadoopLinkedJar.archivePath - } - - into ("hadoop-libs") { - from configurations.hadoop2.allArtifacts.files - from configurations.hadoop2 - } -} - -task miniHdfsStart(type: JavaExec) { - classpath = sourceSets.test.compileClasspath + sourceSets.test.output - main = "org.elasticsearch.plugin.hadoop.hdfs.MiniHDFS" - errorOutput = new FileOutputStream("build/minihdfs.err") - standardOutput = new FileOutputStream("build/minihdfs.out") - //ext.hdfsPid = (main as Class).getPid -} - -//task miniHdfsStop(type: org.elasticsearch.gradle.LoggedExec) { -// onlyIf { hdfsPid > -1 } -// if (Os.isFamily(Os.FAMILY_WINDOWS)) { -// executable 'Taskkill' -// args '/PID', hdfsCluster.pid, '/F' -// } else { -// executable 'kill' -// args '-9', hdfsCluster.pid -// } -//} - -//integTest.dependsOn(miniHdfsStart) -//integTest.finalizedBy(miniHdfsStop) - thirdPartyAudit.enabled = false - -artifacts { - archives bundlePlugin - 'default' bundlePlugin -} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index 6ea9cd16544..3f7d177c516 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -83,8 +83,8 @@ public class HdfsTests extends ESIntegTestCase { @Before public final void wipeBefore() throws Exception { wipeRepositories(); - port = MiniHDFS.getPort(); - path = "build/data/repo-" + randomInt(); + //port = MiniHDFS.getPort(); + 
//path = "build/data/repo-" + randomInt(); } @After @@ -220,4 +220,4 @@ public class HdfsTests extends ESIntegTestCase { private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java deleted file mode 100644 index a492ce7ae6d..00000000000 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFS.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.hadoop.hdfs; - -import com.carrotsearch.randomizedtesting.RandomizedTest; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.monitor.jvm.JvmInfo; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Locale; - -public class MiniHDFS { - - private static volatile MiniDFSCluster dfs; - - private static String PORT_FILE_NAME = "minihdfs.port"; - private static String PID_FILE_NAME = "minihdfs.pid"; - - public static void main(String[] args) throws Exception { - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - MiniHDFS.stop(); - } - }); - start(); - } - - public static int start() throws IOException { - if (dfs != null) { - return -1; - } - - Path basePath = getBasePath(); - Path portPath = basePath.resolve(PORT_FILE_NAME); - Path pidPath = basePath.resolve(PID_FILE_NAME); - - if (Files.exists(basePath)) { - RandomizedTest.rmDir(basePath); - } - - Configuration cfg = new Configuration(); - cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, getBasePath().toAbsolutePath().toString()); - // lower default permission - cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); - dfs = new MiniDFSCluster.Builder(cfg).build(); - int port = dfs.getNameNodePort(); - - // write port - Files.write(portPath, Integer.toString(port).getBytes(StandardCharsets.UTF_8)); - // write pid - Files.write(pidPath, Long.toString(JvmInfo.jvmInfo().getPid()).getBytes(StandardCharsets.UTF_8)); - - System.out.printf(Locale.ROOT, "Started HDFS at %s\n", dfs.getURI()); - System.out.printf(Locale.ROOT, "Port information available at %s\n", portPath.toRealPath()); - System.out.printf(Locale.ROOT, "PID information available at %s\n", pidPath.toRealPath()); - return port; - } - - private static Path getBasePath() { - Path tmpFolder = 
PathUtils.get(System.getProperty("java.io.tmpdir")); - // "test.build.data" - String baseFolder = System.getProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, "es-test/build/test/data"); - return tmpFolder.resolve(baseFolder); - } - - public static int getPort() throws IOException { - Path portPath = getBasePath().resolve(PORT_FILE_NAME); - if (Files.exists(portPath)) { - return Integer.parseInt(new String(Files.readAllBytes(portPath), StandardCharsets.UTF_8)); - } - throw new IllegalStateException(String.format(Locale.ROOT, "Cannot find Mini DFS port file at %s ; was '%s' started?", portPath.toAbsolutePath(), MiniHDFS.class)); - } - - public static long getPid() throws Exception { - Path pidPath = getBasePath().resolve(PID_FILE_NAME); - if (Files.exists(pidPath)) { - return Long.parseLong(new String(Files.readAllBytes(pidPath), StandardCharsets.UTF_8)); - } - throw new IllegalStateException(String.format(Locale.ROOT, "Cannot find Mini DFS pid file at %s ; was '%s' started?", pidPath.toAbsolutePath(), MiniHDFS.class)); - } - - - public static void stop() { - if (dfs != null) { - dfs.shutdown(true); - dfs = null; - } - } -} From c2c5081830ac8a4febbc3ad69832daa26ae0d278 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Dec 2015 17:01:38 -0800 Subject: [PATCH 152/322] Remove unneeded class loading stuff from hdfs plugin --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 94 +------------------ .../plugin/hadoop/hdfs/HdfsTestPlugin.java | 31 ------ .../plugin/hadoop/hdfs/HdfsTests.java | 3 +- 3 files changed, 5 insertions(+), 123 deletions(-) delete mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 9b65f7bec2f..dfb9d5c2662 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.hdfs.HdfsRepository; // // Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode. 
@@ -79,95 +80,6 @@ public class HdfsPlugin extends Plugin { @SuppressWarnings("unchecked") public void onModule(RepositoriesModule repositoriesModule) { - String baseLib = Utils.detectLibFolder(); - List<URL> cp = getHadoopClassLoaderPath(baseLib); - - ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader()); - - Class<? extends Repository> repository = null; - try { - repository = (Class<? extends Repository>) hadoopCL.loadClass("org.elasticsearch.repositories.hdfs.HdfsRepository"); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("Cannot load plugin class; is the plugin class setup correctly?", cnfe); - } - - repositoriesModule.registerRepository("hdfs", repository, BlobStoreIndexShardRepository.class); - Loggers.getLogger(HdfsPlugin.class).info("Loaded Hadoop [{}] libraries from {}", getHadoopVersion(hadoopCL), baseLib); + repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class); } - - protected List<URL> getHadoopClassLoaderPath(String baseLib) { - List<URL> cp = new ArrayList<>(); - // add plugin internal jar - discoverJars(createURI(baseLib, "internal-libs"), cp, false); - // add Hadoop jars - discoverJars(createURI(baseLib, "hadoop-libs"), cp, true); - return cp; - } - - private String getHadoopVersion(ClassLoader hadoopCL) { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - return AccessController.doPrivileged(new PrivilegedAction<String>() { - @Override - public String run() { - // Hadoop 2 relies on TCCL to determine the version - ClassLoader tccl = Thread.currentThread().getContextClassLoader(); - try { - Thread.currentThread().setContextClassLoader(hadoopCL); - return doGetHadoopVersion(hadoopCL); - } finally { - Thread.currentThread().setContextClassLoader(tccl); - } - } - }, Utils.hadoopACC()); - } - - private String doGetHadoopVersion(ClassLoader hadoopCL) { - String version = "Unknown"; - - Class<?> clz = null; - try { - clz = hadoopCL.loadClass("org.apache.hadoop.util.VersionInfo"); - } catch (ClassNotFoundException cnfe) { - // unknown - } - if (clz != null) { - try { - Method method = clz.getMethod("getVersion"); - version = method.invoke(null).toString(); - } catch (Exception ex) { - // class has changed, ignore - } - } - - return version; - } - - private URI createURI(String base, String suffix) { - String location = base + suffix; - try { - return new URI(location); - } catch (URISyntaxException ex) { - throw new IllegalStateException(String.format(Locale.ROOT, "Cannot detect plugin folder; [%s] seems invalid", location), ex); - } - } - - @SuppressForbidden(reason = "discover nested jar") - private void discoverJars(URI libPath, List<URL> cp, boolean optional) { - try { - Path[] jars = FileSystemUtils.files(PathUtils.get(libPath), "*.jar"); - - for (Path path : jars) { - cp.add(path.toUri().toURL()); - } - } catch (IOException ex) { - if (!optional) { - throw new IllegalStateException("Cannot compute plugin classpath", ex); - } - } - } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java deleted file mode 100644 index 868e39813e3..00000000000 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed 
to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugin.hadoop.hdfs; - -import java.net.URL; -import java.util.Collections; -import java.util.List; - -public class HdfsTestPlugin extends HdfsPlugin { - - @Override - protected List<URL> getHadoopClassLoaderPath(String baseLib) { - return Collections.emptyList(); - } -} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index 3f7d177c516..3f643aebe94 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.hdfs.TestingFs; +import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -74,7 +75,7 @@ public class HdfsTests extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(HdfsTestPlugin.class); + return pluginList(HdfsPlugin.class); } private String path; From 5d976f14318eafb0f7be028755c7f28c598b350a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 20:16:15 -0500 Subject: [PATCH 153/322] add sniper for broken hadoop --- .../org/elasticsearch/bootstrap/ESPolicy.java | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index ee804b1480e..1cd3a9ad57e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.SuppressForbidden; import java.net.SocketPermission; import java.net.URL; +import java.io.FilePermission; +import java.io.IOException; import java.security.CodeSource; import java.security.Permission; import java.security.PermissionCollection; @@ -81,10 +83,39 @@ final class ESPolicy extends Policy { } } + // Special handling for broken Hadoop code: "let me execute or my classes will not load" + // yeah right, REMOVE THIS when hadoop is fixed + if (permission instanceof FilePermission && "<<ALL FILES>>".equals(permission.getName())) { + for (StackTraceElement element : Thread.currentThread().getStackTrace()) { + if ("org.apache.hadoop.util.Shell".equals(element.getClassName()) && + "runCommand".equals(element.getMethodName())) { + 
// we found the horrible method: the hack begins! + // force the hadoop code to back down, by throwing an exception that it catches. + rethrow(new IOException("no hadoop, you cannot do this.")); + } + } + } + // otherwise defer to template + dynamic file permissions return template.implies(domain, permission) || dynamic.implies(permission) || system.implies(domain, permission); } + /** + * Classy puzzler to rethrow any checked exception as an unchecked one. + */ + private static class Rethrower<T extends Throwable> { + private void rethrow(Throwable t) throws T { + throw (T) t; + } + } + + /** + * Rethrows t (identical object). + */ + private void rethrow(Throwable t) { + new Rethrower<Error>().rethrow(t); + } + @Override public PermissionCollection getPermissions(CodeSource codesource) { // code should not rely on this method, or at least use it correctly: From 75ef9da53f8766857f967b2ff84b6707ea5d0e80 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 22:11:58 -0500 Subject: [PATCH 154/322] get up to connectexception --- distribution/build.gradle | 2 +- plugins/repository-hdfs/build.gradle | 7 ++++++- .../src/main/plugin-metadata/plugin-security.policy | 3 ++- .../test/hdfs_repository/20_repository.yaml | 9 +++------ 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index fcf22ff48dc..03b30848282 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -39,7 +39,7 @@ buildscript { } // this is common configuration for distributions, but we also add it here for the license check to use -ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api') +ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive() // .exclude(module: 'slf4j-api') /***************************************************************************** diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 6383a684018..9c174d7921e 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -33,12 +33,17 @@ dependencies { compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}" compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}" + compile 'org.apache.htrace:htrace-core:3.1.0-incubating' compile 'com.google.guava:guava:16.0.1' + compile 'com.google.protobuf:protobuf-java:2.5.0' compile 'commons-logging:commons-logging:1.1.3' compile 'commons-collections:commons-collections:3.2.2' compile 'commons-configuration:commons-configuration:1.6' + compile 'commons-io:commons-io:2.4' compile 'commons-lang:commons-lang:2.6' - + compile 'javax.servlet:servlet-api:2.5' + // we need this one, its not really 'provided' compile 'org.slf4j:slf4j-api:${versions.slf4j}' } dependencyLicenses { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index f80e103201b..82009d888fe 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -44,6 +44,7 @@ grant { // JAAS libraries are not loaded with the proper context in Hadoop, hence why the permission is needed here permission java.lang.RuntimePermission "loadLibrary.jaas_nt"; + permission java.lang.RuntimePermission "loadLibrary.jaas_unix"; + // 
plus LoginContext permission javax.security.auth.AuthPermission "modifyPrincipals"; @@ -58,4 +59,4 @@ grant { // DFSClient init (metrics again) permission java.lang.RuntimePermission "shutdownHooks"; - }; \ No newline at end of file + }; diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index f1f5f7a65e0..8e97c103dc2 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -10,10 +10,8 @@ body: type: hdfs settings: - # local HDFS implementation - conf.fs.es-hdfs.impl: "org.elasticsearch.repositories.hdfs.TestingFs" - uri: "es-hdfs://./build/" - path: "build/data/repo-hdfs" + uri: "hdfs://localhost:8888" + path: "foo/bar" # Get repositry - do: @@ -21,5 +19,4 @@ repository: test_repo_hdfs_1 - is_true: test_repo_hdfs_1 - - is_true: test_repo_hdfs_1.settings.uri - - match: {test_repo_hdfs_1.settings.path : "build/data/repo-hdfs"} + - match: {test_repo_hdfs_1.settings.path : "foo/bar"} From fbe3d64ea4dd4f9bf6e69f5fc91a1eecfd27c21b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 22:55:15 -0500 Subject: [PATCH 155/322] add passing test that takes snapshot --- .../test/hdfs_repository/20_repository.yaml | 2 +- .../test/hdfs_repository/30_snapshot.yaml | 43 +++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index 8e97c103dc2..1bc76ddd863 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -10,7 +10,7 @@ body: type: hdfs settings: - uri: "hdfs://localhost:8888" + uri: "hdfs://localhost:39226" path: "foo/bar" # Get repositry diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml new file mode 100644 index 00000000000..21e1317694e --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -0,0 +1,43 @@ +# Integration tests for HDFS Repository plugin +# +# Actually perform a snapshot to hdfs +# +--- +setup: + + - do: + snapshot.create_repository: + repository: test_repo_hdfs_snapshot + verify: false + body: + type: hdfs + settings: + uri: "hdfs://localhost:39226" + path: "foo/bar" + + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + + - do: + cluster.health: + wait_for_status: yellow + +--- +"Create a snapshot": + + - do: + snapshot.create: + repository: test_repo_hdfs_snapshot + snapshot: test_snapshot + wait_for_completion: true + + - match: { snapshot.snapshot: test_snapshot } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + From bc1196243898a289fb9adf70d597156a1743b1d5 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 
2015 23:16:41 -0500 Subject: [PATCH 156/322] get full snapshot restore tests passing --- .../hdfs_repository/20_repository.disabled | 25 ------- .../test/hdfs_repository/20_repository.yaml | 4 +- .../test/hdfs_repository/30_snapshot.yaml | 7 +- .../test/hdfs_repository/40_restore.yaml | 71 +++++++++++++++++++ 4 files changed, 79 insertions(+), 28 deletions(-) delete mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled deleted file mode 100644 index f1f5f7a65e0..00000000000 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled +++ /dev/null @@ -1,25 +0,0 @@ -# Integration tests for HDFS Repository plugin -# -# Check plugin is installed -# -"HDFS Repository Config": - - do: - snapshot.create_repository: - repository: test_repo_hdfs_1 - verify: false - body: - type: hdfs - settings: - # local HDFS implementation - conf.fs.es-hdfs.impl: "org.elasticsearch.repositories.hdfs.TestingFs" - uri: "es-hdfs://./build/" - path: "build/data/repo-hdfs" - - # Get repositry - - do: - snapshot.get_repository: - repository: test_repo_hdfs_1 - - - is_true: test_repo_hdfs_1 - - is_true: test_repo_hdfs_1.settings.uri - - match: {test_repo_hdfs_1.settings.path : "build/data/repo-hdfs"} diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index 1bc76ddd863..20cbcef35fd 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -10,10 +10,10 @@ body: type: hdfs settings: - uri: "hdfs://localhost:39226" + uri: "hdfs://localhost:36909" path: "foo/bar" - # Get repositry + # Get repository - do: snapshot.get_repository: repository: test_repo_hdfs_1 diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index 21e1317694e..95fcd7d5159 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -12,7 +12,7 @@ setup: body: type: hdfs settings: - uri: "hdfs://localhost:39226" + uri: "hdfs://localhost:36909" path: "foo/bar" - do: @@ -41,3 +41,8 @@ setup: - match: { snapshot.shards.successful: 1 } - match: { snapshot.shards.failed : 0 } + # ghetto teardown, so we can debug this thing with external hdfs + - do: + snapshot.delete: + repository: test_repo_hdfs_snapshot + snapshot: test_snapshot diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml new file mode 100644 index 00000000000..767775e4039 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -0,0 +1,71 @@ +# Integration 
tests for HDFS Repository plugin +# +# Actually perform a snapshot to hdfs, then restore it +# +--- +setup: + + - do: + snapshot.create_repository: + repository: test_repo_hdfs_restore + verify: false + body: + type: hdfs + settings: + uri: "hdfs://localhost:36909" + path: "foo/bar" + + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + +--- +"Create a snapshot and then restore it": + + - do: + snapshot.create: + repository: test_repo_hdfs_restore + snapshot: test_restore + wait_for_completion: true + + - match: { snapshot.snapshot: test_restore } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + - is_true: snapshot.version + - gt: { snapshot.version_id: 0} + + - do: + indices.close: + index : test_index + + - do: + snapshot.restore: + repository: test_repo_hdfs_restore + snapshot: test_restore + wait_for_completion: true + + - do: + indices.recovery: + index: test_index + + - match: { test_index.shards.0.type: SNAPSHOT } + - match: { test_index.shards.0.stage: DONE } + - match: { test_index.shards.0.index.files.recovered: 1} + - gt: { test_index.shards.0.index.size.recovered_in_bytes: 0} + - match: { test_index.shards.0.index.files.reused: 0} + - match: { test_index.shards.0.index.size.reused_in_bytes: 0} + + # ghetto teardown, so we can debug this thing with external hdfs + - do: + snapshot.delete: + repository: test_repo_hdfs_restore + snapshot: test_restore From 02fbd5511893f2ca33e91448ae2c37d4da864392 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 23:45:05 -0500 Subject: [PATCH 157/322] enable thirdPartyAudit so you can see the crazy shit hadoop does --- plugins/repository-hdfs/build.gradle | 54 +++++++++++++++++++++++++++- 1 file changed, 53 insertions(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 9c174d7921e..dfceeec14e0 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -52,4 +52,56 @@ dependencyLicenses { compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' -thirdPartyAudit.enabled = false +thirdPartyAudit.missingClasses = true +thirdPartyAudit.excludes = [ + // note: the jersey ones may be bogus, see my bug report at forbidden-apis! 
+ // internal java api: com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable + // internal java api: com.sun.jersey.api.core.HttpContext + // internal java api: com.sun.jersey.core.spi.component.ComponentScope + // internal java api: com.sun.jersey.spi.inject.Injectable + // internal java api: com.sun.jersey.core.spi.component.ComponentContext + 'org.apache.hadoop.hdfs.web.resources.UserProvider', + + // internal java api: com.sun.jersey.spi.container.ResourceFilters + 'org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods', + // internal java api: com.sun.jersey.spi.container.servlet.ServletContainer + 'org.apache.hadoop.http.HttpServer', + 'org.apache.hadoop.http.HttpServer2', + + // internal java api: com.sun.jersey.api.ParamException + 'org.apache.hadoop.hdfs.web.resources.ExceptionHandler', + 'org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ExceptionHandler', + 'org.apache.hadoop.hdfs.web.ParamFilter', + + // internal java api: com.sun.jersey.spi.container.ContainerRequestFilter + // internal java api: com.sun.jersey.spi.container.ContainerRequest + 'org.apache.hadoop.hdfs.web.ParamFilter', + 'org.apache.hadoop.hdfs.web.ParamFilter$1', + + // internal java api: com.sun.jndi.ldap.LdapCtxFactory + 'org.apache.hadoop.security.LdapGroupsMapping', + + // internal java api: sun.net.dns.ResolverConfiguration + // internal java api: sun.net.util.IPAddressUtil + 'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver', + + // internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', + 'org.apache.hadoop.io.nativeio.NativeIO', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', + + // internal java api: sun.nio.ch.DirectBuffer + // internal java api: sun.misc.Cleaner + 'org.apache.hadoop.io.nativeio.NativeIO$POSIX', + 'org.apache.hadoop.crypto.CryptoStreamUtils', + + // internal java api: sun.misc.SignalHandler + 'org.apache.hadoop.util.SignalLogger$Handler', +] From 2e8c68d09bdc76bdb10d2c7fc921ed7a05139291 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 18 Dec 2015 23:51:41 -0500 Subject: [PATCH 158/322] Remove no-longer needed domaincombiner stuff --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 41 ------- .../plugin/hadoop/hdfs/Utils.java | 102 ------------------ .../repositories/hdfs/HdfsRepository.java | 2 +- .../repositories/hdfs/SecurityUtils.java | 10 +- .../plugin-metadata/plugin-security.policy | 3 - .../plugin/hadoop/hdfs/UtilsTests.java | 29 ----- 6 files changed, 2 insertions(+), 185 deletions(-) delete mode 100644 plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java delete mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index dfb9d5c2662..100cf73ddcd 100644 --- 
a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -18,52 +18,11 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; -import java.io.IOException; -import java.lang.reflect.Method; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; -import java.nio.file.Path; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; - -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.hdfs.HdfsRepository; -// -// Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode. -// This poses two problems: -// - Hadoop itself comes with tons of jars, many providing the same classes across packages. In particular Hadoop 2 provides package annotations in the same -// package across jars which trips JarHell. Thus, to allow Hadoop jars to load, the plugin uses a dedicated CL which picks them up from the hadoop-libs folder. -// - The issue though with using a different CL is that it picks up the jars from a different location / codeBase and thus it does not fall under the plugin -// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. -// There are different approaches here: -// - implement a custom classloader that loads the jars but 'lies' about the codesource. It is doable but since URLClassLoader is locked down, one would -// would have to implement the whole jar opening and loading from it. Not impossible but still fairly low-level. -// Further more, even if the code has the proper credentials, it needs to use the proper Privileged blocks to use its full permissions which does not -// happen in the Hadoop code base. -// - use a different Policy. Works but the Policy is JVM wide and thus the code needs to be quite efficient - quite a bit impact to cover just some plugin -// libraries -// - use a DomainCombiner. This doesn't change the semantics (it's clear where the code is loaded from, etc..) however it gives us a scoped, fine-grained -// callback on handling the permission intersection for secured calls. Note that DC works only in the current PAC call - the moment another PA is used, -// the domain combiner is going to be ignored (unless the caller specifically uses it). Due to its scoped impact and official Java support, this approach -// was used. - -// ClassLoading info -// - package plugin.hadoop.hdfs is part of the plugin -// - all the other packages are assumed to be in the nested Hadoop CL. 
// Code public class HdfsPlugin extends Plugin { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java deleted file mode 100644 index ad915adeb2a..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugin.hadoop.hdfs; - -import java.net.URL; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.DomainCombiner; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; - -import org.elasticsearch.SpecialPermission; - -public abstract class Utils { - - protected static AccessControlContext hadoopACC() { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - return AccessController.doPrivileged(new PrivilegedAction<AccessControlContext>() { - @Override - public AccessControlContext run() { - return new AccessControlContext(AccessController.getContext(), new HadoopDomainCombiner()); - } - }); - } - - private static class HadoopDomainCombiner implements DomainCombiner { - - private static String BASE_LIB = detectLibFolder(); - - @Override - public ProtectionDomain[] combine(ProtectionDomain[] currentDomains, ProtectionDomain[] assignedDomains) { - for (ProtectionDomain pd : assignedDomains) { - if (pd.getCodeSource().getLocation().toString().startsWith(BASE_LIB)) { - return assignedDomains; - } - } - - return currentDomains; - } - } - - static String detectLibFolder() { - ClassLoader cl = Utils.class.getClassLoader(); - - // we could get the URL from the URLClassloader directly - // but that can create issues when running the tests from the IDE - // we could detect that by loading resources but that as well relies on - // the JAR URL - String classToLookFor = HdfsPlugin.class.getName().replace(".", "/").concat(".class"); - URL classURL = cl.getResource(classToLookFor); - if (classURL == null) { - throw new IllegalStateException("Cannot detect itself; something is wrong with this ClassLoader " + cl); - } - - String base = classURL.toString(); - - // extract root - // typically a JAR URL - int index = base.indexOf("!/"); - if (index > 0) { - base = base.substring(0, index); - // remove its prefix (jar:) - base = base.substring(4); - // remove the trailing jar - index = base.lastIndexOf("/"); - base = base.substring(0, index + 1); - } - // not a jar - something else, do a best effort here - else { - // remove the class searched - base = base.substring(0, base.length() - 
classToLookFor.length()); - } - - // append / - if (!base.endsWith("/")) { - base = base.concat("/"); - } - - return base; - } -} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index d5208665c6f..2bda9d5857a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -104,7 +104,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa public FileContext run() throws IOException { return doGetFileContext(); } - }, SecurityUtils.AccBridge.acc()); + }); } catch (PrivilegedActionException pae) { Throwable th = pae.getCause(); if (th instanceof Error) { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index 545e1fc62b5..c86842baf32 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -22,22 +22,14 @@ package org.elasticsearch.repositories.hdfs; import org.apache.hadoop.fs.FileContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.plugin.hadoop.hdfs.Utils; import java.io.IOException; -import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; class SecurityUtils { - abstract static class AccBridge extends Utils { - static AccessControlContext acc() { - return Utils.hadoopACC(); - } - } - static <V> V execute(FileContextFactory fcf, FcCallback<V> callback) throws IOException { return execute(fcf.getFileContext(), callback); } @@ -55,7 +47,7 @@ class SecurityUtils { public V run() throws IOException { return callback.doInHdfs(fc); } - }, AccBridge.acc()); + }); } catch (PrivilegedActionException pae) { Throwable th = pae.getCause(); if (th instanceof Error) { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index 82009d888fe..1cb9f558ee1 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -21,9 +21,6 @@ grant { // used by the plugin to get the TCCL to properly initialize all of Hadoop components permission java.lang.RuntimePermission "getClassLoader"; - // used for DomainCombiner - permission java.security.SecurityPermission "createAccessControlContext"; - // set TCCL used for bootstrapping Hadoop Configuration and JAAS permission java.lang.RuntimePermission "setContextClassLoader"; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java deleted file mode 100644 index b0ef392447e..00000000000 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugin.hadoop.hdfs; - -import org.elasticsearch.test.ESTestCase; - -public class UtilsTests extends ESTestCase { - - public void testDetectLibFolder() { - String location = HdfsPlugin.class.getProtectionDomain().getCodeSource().getLocation().toString(); - assertEquals(location, Utils.detectLibFolder()); - } -} From f174e96a1496e3e029999d1703e32237465b212a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sat, 19 Dec 2015 00:21:01 -0500 Subject: [PATCH 159/322] explicitly initialize some hadoop classes elevated, so we don't rely on classloading order. maybe this allows us to do less stuff in doPriv later, we will see. at least it makes things like unit testing easier. --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 25 +++++++++++++++ .../plugin-metadata/plugin-security.policy | 12 ++++----- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 100cf73ddcd..ba6d840c181 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -18,6 +18,10 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; +import java.security.AccessController; +import java.security.PrivilegedAction; + +import org.elasticsearch.SpecialPermission; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; @@ -26,6 +30,27 @@ import org.elasticsearch.repositories.hdfs.HdfsRepository; // Code public class HdfsPlugin extends Plugin { + + // initialize some problematic classes with elevated privileges + static { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction<Void>() { + @Override + public Void run() { + try { + Class.forName("org.apache.hadoop.security.UserGroupInformation"); + Class.forName("org.apache.hadoop.util.StringUtils"); + Class.forName("org.apache.hadoop.util.ShutdownHookManager"); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + return null; + } + }); + } @Override public String name() { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index 1cb9f558ee1..2aaf5717275 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -28,13 +28,15 @@ grant { // Hadoop 2 
// - // UserGroupInformation (UGI) Metrics + // UserGroupInformation (UGI) Metrics clinit permission java.lang.RuntimePermission "accessDeclaredMembers"; - permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - // Shell initialization - reading system props + // org.apache.hadoop.util.StringUtils clinit permission java.util.PropertyPermission "*", "read,write"; + + // org.apache.hadoop.util.ShutdownHookManager clinit + permission java.lang.RuntimePermission "shutdownHooks"; // UGI triggers JAAS permission javax.security.auth.AuthPermission "getSubject"; @@ -50,10 +52,6 @@ grant { permission javax.security.auth.AuthPermission "modifyPrivateCredentials"; - //permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials \"*\"", "read"; - permission javax.security.auth.AuthPermission "doAs"; - // DFSClient init (metrics again) - permission java.lang.RuntimePermission "shutdownHooks"; }; From 3269beeb4dd71422e9d2998cf4f72260a9018e37 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sat, 19 Dec 2015 02:09:14 -0500 Subject: [PATCH 160/322] don't throw exceptions from ctor, guice is hell --- .../repositories/hdfs/HdfsRepository.java | 46 ++++++++++++------- .../test/hdfs_repository/20_repository.yaml | 2 +- .../test/hdfs_repository/30_snapshot.yaml | 2 +- .../test/hdfs_repository/40_restore.yaml | 2 +- 4 files changed, 32 insertions(+), 20 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 2bda9d5857a..b00c72bad8d 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -55,38 +55,50 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa public final static String TYPE = "hdfs"; - private final HdfsBlobStore blobStore; private final BlobPath basePath; private final ByteSizeValue chunkSize; private final boolean compress; private final RepositorySettings repositorySettings; + private final ThreadPool threadPool; + private final String path; private FileContext fc; + private HdfsBlobStore blobStore; @Inject public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); this.repositorySettings = repositorySettings; + this.threadPool = threadPool; - String path = repositorySettings.settings().get("path", settings.get("path")); + path = repositorySettings.settings().get("path", settings.get("path")); + + + this.basePath = BlobPath.cleanPath(); + this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null)); + this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false)); + } + + @Override + protected void doStart() { + // get configuration if (path == null) { throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore"); } - - // get configuration - fc = getFileContext(); - Path hdfsPath = SecurityUtils.execute(fc, new FcCallback<Path>() { - @Override - public Path doInHdfs(FileContext fc) throws IOException { - return fc.makeQualified(new Path(path)); - } - }); - this.basePath = BlobPath.cleanPath(); - - 
logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); - blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool); - this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null)); - this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false)); + try { + fc = getFileContext(); + Path hdfsPath = SecurityUtils.execute(fc, new FcCallback() { + @Override + public Path doInHdfs(FileContext fc) throws IOException { + return fc.makeQualified(new Path(path)); + } + }); + logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); + blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool); + } catch (IOException e) { + throw new RuntimeException(e); + } + super.doStart(); } // as the FileSystem is long-lived and might go away, make sure to check it before it's being used. diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index 20cbcef35fd..c01128477ac 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -10,7 +10,7 @@ body: type: hdfs settings: - uri: "hdfs://localhost:36909" + uri: "hdfs://localhost:40737" path: "foo/bar" # Get repository diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index 95fcd7d5159..dcb521ff2e3 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -12,7 +12,7 @@ setup: body: type: hdfs settings: - uri: "hdfs://localhost:36909" + uri: "hdfs://localhost:40737" path: "foo/bar" - do: diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index 767775e4039..09f33aff2e2 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -12,7 +12,7 @@ setup: body: type: hdfs settings: - uri: "hdfs://localhost:36909" + uri: "hdfs://localhost:40737" path: "foo/bar" - do: From 9df447295cae6b11837d6d1f71b35682493c32de Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sat, 19 Dec 2015 02:43:27 -0500 Subject: [PATCH 161/322] Fix unit tests (also works from IDE). 
--- .../repositories/hdfs/TestingFs.java | 57 ----------------- .../plugin/hadoop/hdfs/HdfsTests.java | 62 +++---------------- .../plugin/hadoop/hdfs/TestingFs.java | 60 ++++++++++++++++++ 3 files changed, 70 insertions(+), 109 deletions(-) delete mode 100644 plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java create mode 100644 plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java deleted file mode 100644 index 46cb0a263fe..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.hdfs; - -import org.apache.hadoop.fs.LocalFileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RawLocalFileSystem; -import org.elasticsearch.common.SuppressForbidden; - -import java.io.File; -import java.io.IOException; - -/** - * Extends LFS to improve some operations to keep the security permissions at - * bay. In particular mkdir is smarter and doesn't have to walk all the file - * hierarchy but rather only limits itself to the parent/working dir and creates - * a file only when necessary. 
- */ -public class TestingFs extends LocalFileSystem { - - private static class ImprovedRawLocalFileSystem extends RawLocalFileSystem { - @Override - @SuppressForbidden(reason = "the Hadoop API depends on java.io.File") - public boolean mkdirs(Path f) throws IOException { - File wd = pathToFile(getWorkingDirectory()); - File local = pathToFile(f); - if (wd.equals(local) || local.exists()) { - return true; - } - return mkdirs(f.getParent()) && local.mkdir(); - } - } - - public TestingFs() { - super(new ImprovedRawLocalFileSystem()); - // use the build path instead of the starting dir as that one has read permissions - //setWorkingDirectory(new Path(getClass().getProtectionDomain().getCodeSource().getLocation().toString())); - setWorkingDirectory(new Path(System.getProperty("java.io.tmpdir"))); - } -} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index 3f643aebe94..68406441b97 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -26,30 +26,19 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryMissingException; -import org.elasticsearch.repositories.hdfs.TestingFs; import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.store.MockFSDirectoryService; -import org.junit.After; -import org.junit.Before; +import java.nio.file.Path; import java.util.Collection; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -/** - * You must specify {@code -Dtests.thirdparty=true} - */ -// Make sure to start MiniHDFS cluster before -// otherwise, one will get some wierd PrivateCredentialPermission exception -// caused by the HDFS fallback code (which doesn't do much anyway) - -// @ThirdParty @ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0) public class HdfsTests extends ESIntegTestCase { @@ -78,31 +67,17 @@ public class HdfsTests extends ESIntegTestCase { return pluginList(HdfsPlugin.class); } - private String path; - private int port; - - @Before - public final void wipeBefore() throws Exception { - wipeRepositories(); - //port = MiniHDFS.getPort(); - //path = "build/data/repo-" + randomInt(); - } - - @After - public final void wipeAfter() throws Exception { - wipeRepositories(); - } - public void testSimpleWorkflow() { Client client = client(); + String path = "foo"; logger.info("--> creating hdfs repository with path [{}]", path); + Path dir = createTempDir(); PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", "hdfs://127.0.0.1:" + port) - .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) - // .put("uri", "es-hdfs:///") + .put("uri", dir.toUri().toString()) + .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) .put("path", path) .put("conf", "additional-cfg.xml, 
conf-2.xml") .put("chunk_size", randomIntBetween(100, 1000) + "k") @@ -177,17 +152,17 @@ public class HdfsTests extends ESIntegTestCase { // RepositoryVerificationException.class public void testWrongPath() { + Path dir = createTempDir(); + Client client = client(); - logger.info("--> creating hdfs repository with path [{}]", path); try { PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", "hdfs://127.0.0.1:" + port) - // .put("uri", "es-hdfs:///") - .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) - .put("path", path + "a@b$c#11:22") + .put("uri", dir.toUri().toString()) + .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) + .put("path", "a@b$c#11:22") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean())) .get(); @@ -201,23 +176,6 @@ public class HdfsTests extends ESIntegTestCase { } } - /** - * Deletes repositories, supports wildcard notation. - */ - public static void wipeRepositories(String... repositories) { - // if nothing is provided, delete all - if (repositories.length == 0) { - repositories = new String[]{"*"}; - } - for (String repository : repositories) { - try { - client().admin().cluster().prepareDeleteRepository(repository).execute().actionGet(); - } catch (RepositoryMissingException ex) { - // ignore - } - } - } - private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java new file mode 100644 index 00000000000..548ac75af72 --- /dev/null +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.hadoop.hdfs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.DelegateToFileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RawLocalFileSystem; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.lucene.util.LuceneTestCase; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Extends LFS to improve some operations to keep the security permissions at + * bay. In particular it never tries to execute! 
+ */ +public class TestingFs extends DelegateToFileSystem { + + private static class ImprovedRawLocalFileSystem extends RawLocalFileSystem { + @Override + public Path getInitialWorkingDirectory() { + // sets working dir to a tmp dir for testing + return new Path(LuceneTestCase.createTempDir().toString()); + } + + @Override + public void setPermission(Path p, FsPermission permission) { + // no execution, thank you very much! + } + } + + public TestingFs(URI uri, Configuration configuration) throws URISyntaxException, IOException { + super(URI.create("file:///"), new ImprovedRawLocalFileSystem(), configuration, "file", false); + } + + @Override + public void checkPath(Path path) { + // we do evil stuff, we admit it. + } +} From 583ba76d0ba40d6771487372a18024cf2aa4d7bb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 19 Dec 2015 00:06:29 -0800 Subject: [PATCH 162/322] Build: Allow plugins to "upgrade" provided deps This only really applies to slf4j, since the others are in the distribution, but it is necessary if a plugin wants to depend on slf4j. --- .../elasticsearch/gradle/plugin/PluginBuildPlugin.groovy | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index db3f3eca50a..7f92bde4b42 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -22,6 +22,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project +import org.gradle.api.artifacts.Dependency import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -99,7 +100,12 @@ public class PluginBuildPlugin extends BuildPlugin { from buildProperties // plugin properties file from pluginMetadata // metadata (eg custom security policy) from project.jar // this plugin's jar - from project.configurations.runtime - project.configurations.provided // the dep jars + // the dependency jars, filtered for those provided + from project.configurations.runtime.fileCollection { Dependency dep -> + project.configurations.provided.contains(dep) == false || + // handle dependencies upgraded explicitly from provided to compile + project.configurations.compile.contains(dep) + } // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { From e2b2ee24fa48c9ff9ea026aa3d505ce12b699bc6 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sat, 19 Dec 2015 03:06:40 -0500 Subject: [PATCH 163/322] Add licensing for dependencies --- .../commons-collections-3.2.2.jar.sha1 | 1 + .../licenses/commons-collections-LICENSE.txt | 202 +++++++++++++++ .../licenses/commons-collections-NOTICE.txt | 5 + .../commons-configuration-1.6.jar.sha1 | 1 + .../commons-configuration-LICENSE.txt | 202 +++++++++++++++ .../licenses/commons-configuration-NOTICE.txt | 5 + .../licenses/commons-io-2.4.jar.sha1 | 1 + .../licenses/commons-io-LICENSE.txt | 202 +++++++++++++++ .../licenses/commons-io-NOTICE.txt | 6 + .../licenses/commons-lang-2.6.jar.sha1 | 1 + .../licenses/commons-lang-LICENSE.txt | 202 +++++++++++++++ .../licenses/commons-lang-NOTICE.txt | 9 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 + .../licenses/commons-logging-LICENSE.txt | 202 
+++++++++++++++
 .../licenses/commons-logging-NOTICE.txt            |   6 +
 .../licenses/guava-16.0.1.jar.sha1                 |   1 +
 .../licenses/guava-LICENSE.txt                     | 202 +++++++++++++++
 .../repository-hdfs/licenses/guava-NOTICE.txt      |   2 +
 .../licenses/hadoop-LICENSE.txt                    | 202 +++++++++++++++
 .../licenses/hadoop-NOTICE.txt                     |   2 +
 .../hadoop-annotations-2.7.1.jar.sha1              |   1 +
 .../licenses/hadoop-auth-2.7.1.jar.sha1            |   1 +
 .../licenses/hadoop-client-2.7.1.jar.sha1          |   1 +
 .../licenses/hadoop-common-2.7.1.jar.sha1          |   1 +
 .../licenses/hadoop-hdfs-2.7.1.jar.sha1            |   1 +
 .../htrace-core-3.1.0-incubating.jar.sha1          |   1 +
 .../licenses/htrace-core-LICENSE.txt               | 242 ++++++++++++++++++
 .../licenses/htrace-core-NOTICE.txt                |  13 +
 .../licenses/protobuf-java-2.5.0.jar.sha1          |   1 +
 .../licenses/protobuf-java-LICENSE.txt             |  10 +
 .../licenses/protobuf-java-NOTICE.txt              |   2 +
 .../licenses/servlet-api-2.5.jar.sha1              |   1 +
 .../licenses/servlet-api-LICENSE.txt               |  93 +++++++
 .../licenses/servlet-api-NOTICE.txt                |   2 +
 34 files changed, 1825 insertions(+)
 create mode 100644 plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-io-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-io-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/guava-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/guava-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt
 create mode 100644 plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt
 create mode 100644 plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1
 create mode 100644
plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt create mode 100644 plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt diff --git a/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 b/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 new file mode 100644 index 00000000000..e9eeffde5da --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 @@ -0,0 +1 @@ +8ad72fe39fa8c91eaaf12aadb21e0c3661fe26d5 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt new file mode 100644 index 00000000000..7f8a95f8bbf --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Collections +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 b/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 new file mode 100644 index 00000000000..44ad1f6d8da --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 @@ -0,0 +1 @@ +32cadde23955d7681b0d94a2715846d20b425235 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt new file mode 100644 index 00000000000..3d6dfaec547 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Configuration +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 new file mode 100644 index 00000000000..2f5b30d0edb --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 @@ -0,0 +1 @@ +b1b6ea3b7e4aa4f492509a4952029cd8e48019ad \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-io-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-io-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt new file mode 100644 index 00000000000..7b27516f07f --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt @@ -0,0 +1,6 @@ +Apache Commons IO +Copyright 2002-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + diff --git a/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 new file mode 100644 index 00000000000..4ee9249d2b7 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 @@ -0,0 +1 @@ +0ce1edb914c94ebc388f086c6827e8bdeec71ac2 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt new file mode 100644 index 00000000000..8dfa22157ab --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt @@ -0,0 +1,9 @@ +Apache Commons Lang +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
+
+This product includes software from the Spring Framework,
+under the Apache License 2.0 (see: StringUtils.containsWhitespace())
+
diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1
new file mode 100644
index 00000000000..5b8f029e582
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1
@@ -0,0 +1 @@
+f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt
new file mode 100644
index 00000000000..556bd03951d
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt
@@ -0,0 +1,6 @@
+Apache Commons Logging
+Copyright 2003-2014 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
diff --git a/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1 b/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1
new file mode 100644
index 00000000000..68f2b233a00
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1
@@ -0,0 +1 @@
+5fa98cd1a63c99a44dd8d3b77e4762b066a5d0c5
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/guava-LICENSE.txt b/plugins/repository-hdfs/licenses/guava-LICENSE.txt
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/guava-LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/plugins/repository-hdfs/licenses/guava-NOTICE.txt b/plugins/repository-hdfs/licenses/guava-NOTICE.txt
new file mode 100644
index 00000000000..139597f9cb0
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/guava-NOTICE.txt
@@ -0,0 +1,2 @@
+
+
diff --git a/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt b/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt b/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt
new file mode 100644
index 00000000000..62fc5816c99
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt
@@ -0,0 +1,2 @@
+This product includes software developed by The Apache Software
+Foundation (http://www.apache.org/).
diff --git a/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1
new file mode 100644
index 00000000000..660467a4c6e
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1
@@ -0,0 +1 @@
+2a77fe74ee056bf45598cf7e20cd624e8388e627
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1
new file mode 100644
index 00000000000..0161301ead2
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1
@@ -0,0 +1 @@
+2515f339f97f1d7ba850485e06e395a58586bc2e
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1
new file mode 100644
index 00000000000..4c6dca8af49
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1
@@ -0,0 +1 @@
+dbc2faacd210e6a1e3eb7def6e42065c7457d960
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1
new file mode 100644
index 00000000000..64ff368db60
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1
@@ -0,0 +1 @@
+50580f5ebab60b1b318ad157f668d8e40a1cc0da
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1
new file mode 100644
index 00000000000..2d4954befaa
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1
@@ -0,0 +1 @@
+11681de93a4cd76c841e352b7094f839b072a21f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1 b/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1
new file mode 100644
index 00000000000..c742d8397cf
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1
@@ -0,0 +1 @@
+f73606e7c9ede5802335c290bf47490ad6d51df3
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt b/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt
new file mode 100644
index 00000000000..0befae88d4f
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt
@@ -0,0 +1,242 @@
+Apache HTrace (incubating) is Apache 2.0 Licensed. See below for licensing
+of dependencies that are NOT Apache Licensed.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+The HTrace Owl logo is from http://www.clker.com/clipart-13653.html. It is
+public domain.
+
+D3, a javascript library for manipulating data, used by htrace-hbase
+is Copyright 2010-2014, Michael Bostock and BSD licensed:
+https://github.com/mbostock/d3/blob/master/LICENSE
+
+Bootstrap, an html, css, and javascript framework, is
+Copyright (c) 2011-2015 Twitter, Inc and MIT licensed:
+https://github.com/twbs/bootstrap/blob/master/LICENSE
+
+underscore, a javascript library of functional programming helpers, is
+(c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters
+& Editors and an MIT license:
+https://github.com/jashkenas/underscore/blob/master/LICENSE
+
+jquery, a javascript library, is Copyright jQuery Foundation and other
+contributors, https://jquery.org/. The software consists of
+voluntary contributions made by many individuals. For exact
+contribution history, see the revision history
+available at https://github.com/jquery/jquery
+It is MIT licensed:
+https://github.com/jquery/jquery/blob/master/LICENSE.txt
+
+backbone, is a javascript library, that is Copyright (c) 2010-2014
+Jeremy Ashkenas, DocumentCloud. It is MIT licensed:
+https://github.com/jashkenas/backbone/blob/master/LICENSE
+
+moment.js is a front end time conversion project.
+It is (c) 2011-2014 Tim Wood, Iskren Chernev, Moment.js contributors
+and shared under the MIT license:
+https://github.com/moment/moment/blob/develop/LICENSE
+
+CMP is an implementation of the MessagePack serialization format in
+C. It is licensed under the MIT license:
+https://github.com/camgunz/cmp/blob/master/LICENSE
+See ./htrace-c/src/util/cmp.c and ./htrace-c/src/util/cmp.h.
diff --git a/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt b/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt
new file mode 100644
index 00000000000..845b6965e04
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt
@@ -0,0 +1,13 @@
+Apache HTrace
+Copyright 2015 The Apache Software Foundation
+
+This product includes software developed at The Apache Software
+Foundation (http://www.apache.org/).
+
+In addition, this product includes software dependencies. See
+the accompanying LICENSE.txt for a listing of dependencies
+that are NOT Apache licensed (with pointers to their licensing)
+
+Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin
+is a distributed tracing system that is Apache 2.0 Licensed.
+Copyright 2012 Twitter, Inc.
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1
new file mode 100644
index 00000000000..71f918819e2
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1
@@ -0,0 +1 @@
+a10732c76bfacdbd633a7eb0f7968b1059a65dfa
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt b/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt
new file mode 100644
index 00000000000..49e7019ac5a
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt
@@ -0,0 +1,10 @@
+Copyright (c) <year>, <copyright holder>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt b/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt
new file mode 100644
index 00000000000..139597f9cb0
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt
@@ -0,0 +1,2 @@
+
+
diff --git a/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1 b/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1
new file mode 100644
index 00000000000..08564097ebe
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1
@@ -0,0 +1 @@
+5959582d97d8b61f4d154ca9e495aafd16726e34
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt b/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt
new file mode 100644
index 00000000000..2b93f7d2e73
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt
@@ -0,0 +1,93 @@
+ COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 1.
+
+Definitions.
+
+1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications.
+
+1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor.
+
+1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof.
+
+1.4. Executable means the Covered Software in any form other than Source Code.
+
+1.5. Initial Developer means the individual or entity that first makes Original Software available under this License.
+
+1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License.
+
+1.7. License means this document.
+
+1.8. Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein.
+
+1.9. Modifications means the Source Code and Executable form of any of the following: A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; B. Any new file that contains any part of the Original Software or previous Modification; or C. Any new file that is contributed or otherwise made available under the terms of this License.
+
+1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License.
+
+1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor.
+
+1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code.
+
+1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity.
+
+2. License Grants.
+
+ 2.1. The Initial Developer Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and
+
+(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof);
+
+ (c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License;
+
+ (d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices.
+
+2.2. Contributor Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and
+
+(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination).
+
+(c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party.
+ +(d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. + +3. Distribution Obligations. + +3.1. Availability of Source Code. Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. + +3.2. Modifications. The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. + +3.3. Required Notices. You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. + +3.4. Application of Additional Terms. You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. + +3.5. Distribution of Executable Versions. You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. 
You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. + +3.6. Larger Works. You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + +4.1. New Versions. Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. + +4.2. Effect of New Versions. You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. + +4.3. Modified Versions. When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. + +5. DISCLAIMER OF WARRANTY. COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +6. TERMINATION. + +6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. + +6.2. 
If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. + +6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. The Covered Software is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. + +9. MISCELLANEOUS. This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdictions conflict-of-law provisions. 
Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. + +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. diff --git a/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt b/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt new file mode 100644 index 00000000000..139597f9cb0 --- /dev/null +++ b/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt @@ -0,0 +1,2 @@ + + From ad46a09cf5aa3d0228a4f8bcdf1861d2ecdecff1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 19 Dec 2015 01:21:53 -0800 Subject: [PATCH 164/322] Revert "Build: Allow plugins to "upgrade" provided deps" This reverts commit 583ba76d0ba40d6771487372a18024cf2aa4d7bb. 
--- .../elasticsearch/gradle/plugin/PluginBuildPlugin.groovy | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 7f92bde4b42..db3f3eca50a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -22,7 +22,6 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project -import org.gradle.api.artifacts.Dependency import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -100,12 +99,7 @@ public class PluginBuildPlugin extends BuildPlugin { from buildProperties // plugin properties file from pluginMetadata // metadata (eg custom security policy) from project.jar // this plugin's jar - // the dependency jars, filtered for those provided - from project.configurations.runtime.fileCollection { Dependency dep -> - project.configurations.provided.contains(dep) == false || - // handle dependencies upgraded explicitly from provided to compile - project.configurations.compile.contains(dep) - } + from project.configurations.runtime - project.configurations.provided // the dep jars // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { From d171773bdb33bdfec10da6edc135c9cb62701f23 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sat, 19 Dec 2015 04:39:01 -0500 Subject: [PATCH 165/322] remove leniency in tests --- .../plugin/hadoop/hdfs/HdfsTests.java | 39 ++----------------- .../test/hdfs_repository/20_repository.yaml | 1 - .../test/hdfs_repository/30_snapshot.yaml | 1 - .../test/hdfs_repository/40_restore.yaml | 1 - 4 files changed, 4 insertions(+), 38 deletions(-) diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index 68406441b97..9728c58203d 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -31,9 +31,7 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.store.MockFSDirectoryService; -import java.nio.file.Path; import java.util.Collection; import static org.hamcrest.Matchers.equalTo; @@ -42,26 +40,6 @@ import static org.hamcrest.Matchers.greaterThan; @ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0) public class HdfsTests extends ESIntegTestCase { - @Override - public Settings indexSettings() { - return Settings.builder() - .put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) - .build(); - } - - @Override - protected Settings nodeSettings(int ordinal) { - Settings.Builder settings = Settings.builder() - .put(super.nodeSettings(ordinal)) - .put("path.home", createTempDir()) - .put("path.repo", "") - 
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false); - return settings.build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return pluginList(HdfsPlugin.class); @@ -69,16 +47,13 @@ public class HdfsTests extends ESIntegTestCase { public void testSimpleWorkflow() { Client client = client(); - String path = "foo"; - logger.info("--> creating hdfs repository with path [{}]", path); - Path dir = createTempDir(); PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", dir.toUri().toString()) + .put("uri", "file:///") .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) - .put("path", path) + .put("path", "foo") .put("conf", "additional-cfg.xml, conf-2.xml") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean()) @@ -135,7 +110,7 @@ // Test restore after index deletion logger.info("--> delete indices"); - wipeIndices("test-idx-1", "test-idx-2"); + cluster().wipeIndices("test-idx-1", "test-idx-2"); logger.info("--> restore one index after deletion"); restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); @@ -146,21 +121,15 @@ assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); } - private void wipeIndices(String... indices) { - cluster().wipeIndices(indices); - } - // RepositoryVerificationException.class public void testWrongPath() { - Path dir = createTempDir(); - Client client = client(); try { PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", dir.toUri().toString()) + .put("uri", "file:///") .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) .put("path", "a@b$c#11:22") .put("chunk_size", randomIntBetween(100, 1000) + "k") diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index c01128477ac..92fa3aba44c 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -6,7 +6,6 @@ - do: snapshot.create_repository: repository: test_repo_hdfs_1 - verify: false body: type: hdfs settings: diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index dcb521ff2e3..38aed590f98 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -8,7 +8,6 @@ setup: - do: snapshot.create_repository: repository: test_repo_hdfs_snapshot - verify: false body: type: hdfs settings: diff --git
a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index 09f33aff2e2..b1a8f78b699 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -8,7 +8,6 @@ setup: - do: snapshot.create_repository: repository: test_repo_hdfs_restore - verify: false body: type: hdfs settings: From 5dcccca848d87795d005d5500f1c404f54819f0b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 15 Dec 2015 22:59:36 -0500 Subject: [PATCH 166/322] add example fixture --- .../jvm-example/example-fixture/build.gradle | 20 ++++++ .../main/java/example/ExampleTestFixture.java | 72 +++++++++++++++++++ 2 files changed, 92 insertions(+) create mode 100644 plugins/jvm-example/example-fixture/build.gradle create mode 100644 plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java diff --git a/plugins/jvm-example/example-fixture/build.gradle b/plugins/jvm-example/example-fixture/build.gradle new file mode 100644 index 00000000000..7761437b8f8 --- /dev/null +++ b/plugins/jvm-example/example-fixture/build.gradle @@ -0,0 +1,20 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'java' diff --git a/plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java b/plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java new file mode 100644 index 00000000000..e69bb97d7ca --- /dev/null +++ b/plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package example; + +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.ByteBuffer; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Collections; + +/** Crappy example test fixture that responds with TEST and closes the connection */ +public class ExampleTestFixture { + public static void main(String args[]) throws Exception { + if (args.length != 1) { + throw new IllegalArgumentException("ExampleTestFixture "); + } + AsynchronousServerSocketChannel server = AsynchronousServerSocketChannel + .open() + .bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + // write port file + Path tmp = Files.createTempFile(null, null); + InetSocketAddress bound = (InetSocketAddress) server.getLocalAddress(); + if (bound.getAddress() instanceof Inet6Address) { + Files.write(tmp, Collections.singleton("[" + bound.getHostString() + "]:" + bound.getPort())); + } else { + Files.write(tmp, Collections.singleton(bound.getHostString() + ":" + bound.getPort())); + } + Files.move(tmp, Paths.get(args[0]), StandardCopyOption.ATOMIC_MOVE); + // go time + server.accept(null, new CompletionHandler<AsynchronousSocketChannel,Void>() { + @Override + public void completed(AsynchronousSocketChannel socket, Void attachment) { + server.accept(null, this); + try (AsynchronousSocketChannel ch = socket) { + ch.write(ByteBuffer.wrap("TEST\n".getBytes(StandardCharsets.UTF_8))).get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void failed(Throwable exc, Void attachment) {} + }); + // wait forever, until you kill me + Thread.sleep(Long.MAX_VALUE); + } +} From 8c6f5a0c60f08fc0903d84abd8fc09162c2dba0e Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 15 Dec 2015 23:09:40 -0500 Subject: [PATCH 167/322] add failing test --- .../plugin/example/ExampleExternalIT.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java new file mode 100644 index 00000000000..d8ebda31fc3 --- /dev/null +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.plugin.example; + +import org.elasticsearch.test.ESTestCase; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.net.Socket; +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +public class ExampleExternalIT extends ESTestCase { + public void testExample() throws Exception { + String host = Objects.requireNonNull(System.getProperty("external.host")); + int port = Integer.parseInt(System.getProperty("external.port")); + try (Socket socket = new Socket(host, port); + BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) { + assertEquals("TEST", reader.readLine()); + } + } +} From 9cb4c82c582d84925af5da0e31292bdb394b17c1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Dec 2015 20:14:37 -0800 Subject: [PATCH 168/322] Build: Add fixture capabilities to integ tests This change adds a Fixture class for use by gradle. A Fixture is an external process that integration tests use. It can be added as a dependsOn for integTest, and will automatically be shut down upon success or failure, with the relevant information dumped on failure. There is also an example fixture in this change. --- .../org/elasticsearch/gradle/AntTask.groovy | 8 +- .../elasticsearch/gradle/test/Fixture.groovy | 287 ++++++++++++++++++ .../gradle/test/RestIntegTestTask.groovy | 23 ++ plugins/build.gradle | 3 +- plugins/jvm-example/build.gradle | 21 ++ .../plugin/example/ExampleExternalIT.java | 9 +- qa/smoke-test-plugins/build.gradle | 2 +- settings.gradle | 1 + test/build.gradle | 22 +- test/fixtures/build.gradle | 0 .../fixtures}/example-fixture/build.gradle | 2 +- .../main/java/example/ExampleTestFixture.java | 17 +- test/framework/build.gradle | 13 - 13 files changed, 381 insertions(+), 27 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy create mode 100644 test/fixtures/build.gradle rename {plugins/jvm-example => test/fixtures}/example-fixture/build.gradle (95%) rename {plugins/jvm-example => test/fixtures}/example-fixture/src/main/java/example/ExampleTestFixture.java (84%) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 1df6306400b..5d7486371eb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -19,13 +19,13 @@ package org.elasticsearch.gradle -import org.apache.tools.ant.BuildException import org.apache.tools.ant.BuildListener import org.apache.tools.ant.BuildLogger import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.Project import org.gradle.api.DefaultTask import org.gradle.api.GradleException +import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskAction import java.nio.charset.Charset @@ -58,14 +58,14 @@ public abstract class AntTask extends DefaultTask { ant.project.removeBuildListener(listener) } - final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : (logger.isInfoEnabled() ? Project.MSG_INFO : Project.MSG_WARN) + final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO final PrintStream stream = useStdout() ?
System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) BuildLogger antLogger = makeLogger(stream, outputLevel) ant.project.addBuildListener(antLogger) try { runAnt(ant) - } catch (BuildException e) { + } catch (Exception e) { // ant failed, so see if we have buffered output to emit, then rethrow the failure String buffer = outputBuffer.toString() if (buffer.isEmpty() == false) { @@ -76,7 +76,7 @@ } /** Runs the doAnt closure. This can be overridden by subclasses instead of having to set a closure. */ - protected abstract void runAnt(AntBuilder ant); + protected abstract void runAnt(AntBuilder ant) /** Create the logger the ant runner will use, with the given stream for error/output. */ protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy new file mode 100644 index 00000000000..46b81624ba3 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy @@ -0,0 +1,287 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test + +import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.AntTask +import org.elasticsearch.gradle.LoggedExec +import org.gradle.api.GradleException +import org.gradle.api.Task +import org.gradle.api.tasks.Exec +import org.gradle.api.tasks.Input + +/** + * A fixture for integration tests which runs in a separate process. + */ +public class Fixture extends AntTask { + + /** The path to the executable that starts the fixture. */ + @Input + String executable + + private final List<Object> arguments = new ArrayList<>() + + @Input + public void args(Object... args) { + arguments.addAll(args) + } + + /** + * Environment variables for the fixture process. The value can be any object, which + * will have toString() called at execution time. + */ + private final Map<String, Object> environment = new HashMap<>() + + @Input + public void env(String key, Object value) { + environment.put(key, value) + } + + /** A flag to indicate whether the command should be executed from a shell. */ + @Input + boolean useShell = false + + /** + * A flag to indicate whether the fixture should be run in the foreground, or spawned. + * It is protected so subclasses can override (eg RunTask). + */ + protected boolean spawn = true + + /** + * A closure to call before the fixture is considered ready. The closure is passed the fixture object, + * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait + * condition is for http on the http port.
+ */ + @Input + Closure waitCondition = { Fixture fixture, AntBuilder ant -> + File tmpFile = new File(fixture.cwd, 'wait.success') + ant.get(src: "http://${fixture.addressAndPort}", + dest: tmpFile.toString(), + ignoreerrors: true, // do not fail on error, so logging information can be flushed + retries: 10) + return tmpFile.exists() + } + + /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */ + public final Task stopTask + + public Fixture() { + stopTask = createStopTask() + finalizedBy(stopTask) + } + + @Override + protected void runAnt(AntBuilder ant) { + project.delete(baseDir) // reset everything + cwd.mkdirs() + final String realExecutable + final List<Object> realArgs = new ArrayList<>() + final Map<String, Object> realEnv = environment + // We need to choose which executable we are using. In shell mode, or when we + // are spawning and thus using the wrapper script, the executable is the shell. + if (useShell || spawn) { + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + realExecutable = 'cmd' + realArgs.add('/C') + realArgs.add('"') // quote the entire command + } else { + realExecutable = 'sh' + } + } else { + realExecutable = executable + realArgs.addAll(arguments) + } + if (spawn) { + writeWrapperScript(executable) + realArgs.add(wrapperScript) + realArgs.addAll(arguments) + } + if (Os.isFamily(Os.FAMILY_WINDOWS) && (useShell || spawn)) { + realArgs.add('"') + } + commandString.eachLine { line -> logger.info(line) } + + ant.exec(executable: realExecutable, spawn: spawn, dir: cwd, taskname: name) { + realEnv.each { key, value -> env(key: key, value: value) } + realArgs.each { arg(value: it) } + } + + String failedProp = "failed${name}" + // first wait for resources, or the failure marker from the wrapper script + ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) { + or { + resourceexists { + file(file: failureMarker.toString()) + } + and { + resourceexists { + file(file: pidFile.toString()) + } + resourceexists { + file(file: portsFile.toString()) + } + } + } + } + + if (ant.project.getProperty(failedProp) || failureMarker.exists()) { + fail("Failed to start ${name}") + } + + // the process is started (has a pid) and is bound to a network interface + // so now wait until the waitCondition has been met + // TODO: change this to a loop? + boolean success + try { + success = waitCondition(this, ant) + } catch (Exception e) { + String msg = "Wait condition caught exception for ${name}" + logger.error(msg, e) + fail(msg, e) + } + if (success == false) { + fail("Wait condition failed for ${name}") + } + } + + /** Returns a debug string used to log information about how the fixture was run. */ + protected String getCommandString() { + String commandString = "\n${name} configuration:\n" + commandString += "-----------------------------------------\n" + commandString += " cwd: ${cwd}\n" + commandString += " command: ${executable} ${arguments.join(' ')}\n" + commandString += ' environment:\n' + environment.each { k, v -> commandString += " ${k}: ${v}\n" } + if (spawn) { + commandString += "\n [${wrapperScript.name}]\n" + wrapperScript.eachLine('UTF-8', { line -> commandString += " ${line}\n"}) + } + return commandString + } + + /** + * Writes a script to run the real executable, so that stdout/stderr can be captured.
+ * TODO: this could be removed if we do use our own ProcessBuilder and pump output from the process + */ + private void writeWrapperScript(String executable) { + wrapperScript.parentFile.mkdirs() + String argsPasser = '"$@"' + String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi" + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + argsPasser = '%*' + exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )" + } + wrapperScript.setText("\"${executable}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8') + } + + /** Fails the build with the given message, logging relevant info. */ + private void fail(String msg, Exception... suppressed) { + if (logger.isInfoEnabled() == false) { + // We already log the command at info level. No need to do it twice. + commandString.eachLine { line -> logger.error(line) } + } + logger.error("${name} output:") + logger.error("-----------------------------------------") + logger.error(" failure marker exists: ${failureMarker.exists()}") + logger.error(" pid file exists: ${pidFile.exists()}") + logger.error(" ports file exists: ${portsFile.exists()}") + // also dump the log file for the startup script (which will include ES logging output to stdout) + if (runLog.exists()) { + logger.error("\n [log]") + runLog.eachLine { line -> logger.error(" ${line}") } + } + logger.error("-----------------------------------------") + GradleException toThrow = new GradleException(msg) + for (Exception e : suppressed) { + toThrow.addSuppressed(e) + } + throw toThrow + } + + /** Creates a task that stops this fixture by killing the process whose id is in the pid file */ + private Task createStopTask() { + final Fixture fixture = this + final Object pid = "${ -> fixture.pid }" + Exec stop = project.tasks.create(name: "${name}#stop", type: LoggedExec) + stop.onlyIf { fixture.pidFile.exists() } + stop.doFirst { + logger.info("Shutting down ${fixture.name} with pid ${pid}") + } + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + stop.executable = 'Taskkill' + stop.args('/PID', pid, '/F') + } else { + stop.executable = 'kill' + stop.args('-9', pid) + } + stop.doLast { + project.delete(fixture.pidFile) + } + return stop + } + + /** + * A path relative to the build dir that all configuration and runtime files + * will live in for this fixture + */ + protected File getBaseDir() { + return new File(project.buildDir, "fixtures/${name}") + } + + /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */ + protected File getCwd() { + return new File(baseDir, 'cwd') + } + + /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */ + protected File getPidFile() { + return new File(baseDir, 'pid') + } + + /** Reads the pid file and returns the process' pid */ + public int getPid() { + return Integer.parseInt(pidFile.getText('UTF-8').trim()) + } + + /** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */ + protected File getPortsFile() { + return new File(baseDir, 'ports') + } + + /** Returns an address and port suitable for a uri to connect to this node over http */ + public String getAddressAndPort() { + return portsFile.readLines("UTF-8").get(0) + } + + /** Returns a file that wraps around the actual command when {@code spawn == true}. */ + protected File getWrapperScript() { + return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run') + } + + /** Returns a file that the wrapper script writes when the command failed.
*/ + protected File getFailureMarker() { + return new File(cwd, 'run.failed') + } + + /** Returns the log file that the wrapper script redirects the command's stdout and stderr to. */ + protected File getRunLog() { + return new File(cwd, 'run.log') + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 75612bb552e..5656be57b8f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,6 +20,8 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin +import org.gradle.api.GradleException +import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.tasks.Input @@ -80,4 +82,25 @@ public class RestIntegTestTask extends RandomizedTestingTask { public ClusterConfiguration getCluster() { return clusterConfig } + + @Override + public Task dependsOn(Object... dependencies) { + super.dependsOn(dependencies) + for (Object dependency : dependencies) { + if (dependency instanceof Fixture) { + finalizedBy(((Fixture)dependency).stopTask) + } + } + return this + } + + @Override + public void setDependsOn(Iterable<?> dependencies) { + super.setDependsOn(dependencies) + for (Object dependency : dependencies) { + if (dependency instanceof Fixture) { + finalizedBy(((Fixture)dependency).stopTask) + } + } + } } diff --git a/plugins/build.gradle b/plugins/build.gradle index bdcc604a296..e49b08c6015 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -17,7 +17,8 @@ * under the License.
*/ -subprojects { +// only configure immediate children of plugins dir +configure(subprojects.findAll { it.parent.path == project.path }) { group = 'org.elasticsearch.plugin' apply plugin: 'elasticsearch.esplugin' diff --git a/plugins/jvm-example/build.gradle b/plugins/jvm-example/build.gradle index d8440eaecad..f0dd69ff8c4 100644 --- a/plugins/jvm-example/build.gradle +++ b/plugins/jvm-example/build.gradle @@ -27,3 +27,24 @@ test.enabled = false compileJava.options.compilerArgs << "-Xlint:-rawtypes" +configurations { + exampleFixture +} + +dependencies { + exampleFixture project(':test:fixtures:example-fixture') +} + +task exampleFixture(type: org.elasticsearch.gradle.test.Fixture) { + dependsOn project.configurations.exampleFixture + executable = new File(project.javaHome, 'bin/java') + args '-cp', "${ -> project.configurations.exampleFixture.asPath }", + 'example.ExampleTestFixture', + baseDir +} + +integTest { + dependsOn exampleFixture + systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" +} + diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java index d8ebda31fc3..1f48549aad4 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java @@ -23,15 +23,18 @@ import org.elasticsearch.test.ESTestCase; import java.io.BufferedReader; import java.io.InputStreamReader; +import java.net.InetAddress; import java.net.Socket; +import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.Objects; public class ExampleExternalIT extends ESTestCase { public void testExample() throws Exception { - String host = Objects.requireNonNull(System.getProperty("external.host")); - int port = Integer.parseInt(System.getProperty("external.port")); - try (Socket socket = new Socket(host, port); + String stringAddress = Objects.requireNonNull(System.getProperty("external.address")); + URL url = new URL("http://" + stringAddress); + InetAddress address = InetAddress.getByName(url.getHost()); + try (Socket socket = new Socket(address, url.getPort()); BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) { assertEquals("TEST", reader.readLine()); } diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index 9d8e3950a83..bc8eace704e 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -22,7 +22,7 @@ import org.elasticsearch.gradle.MavenFilteringHack apply plugin: 'elasticsearch.rest-test' ext.pluginsCount = 0 -project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each { subproj -> +project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> integTest { cluster { // need to get a non-decorated project object, so must re-lookup the project by path diff --git a/settings.gradle b/settings.gradle index 3526c0429ef..e2c63dcfed5 100644 --- a/settings.gradle +++ b/settings.gradle @@ -9,6 +9,7 @@ List projects = [ 'distribution:deb', 'distribution:rpm', 'test:framework', + 'test:fixtures:example-fixture', 'modules:lang-expression', 'modules:lang-groovy', 'modules:lang-mustache', diff --git a/test/build.gradle b/test/build.gradle index 037bb8d508e..564f8673307 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -17,7 +17,27 
@@ * under the License. */ +import org.elasticsearch.gradle.precommit.PrecommitTasks + subprojects { + // fixtures is just an intermediate parent project + if (name == 'fixtures') return + group = 'org.elasticsearch.test' - apply plugin: 'com.bmuschko.nexus' + apply plugin: 'elasticsearch.build' + + + // the main files are actually test files, so use the appropriate forbidden api sigs + forbiddenApisMain { + bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] + signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), + PrecommitTasks.getResource('/forbidden/test-signatures.txt')] + } + + // TODO: should we have licenses for our test deps? + dependencyLicenses.enabled = false + + // TODO: why is the test framework pulled in... + forbiddenApisMain.enabled = false + jarHell.enabled = false } diff --git a/test/fixtures/build.gradle b/test/fixtures/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/plugins/jvm-example/example-fixture/build.gradle b/test/fixtures/example-fixture/build.gradle similarity index 95% rename from plugins/jvm-example/example-fixture/build.gradle rename to test/fixtures/example-fixture/build.gradle index 7761437b8f8..4c94aa93be4 100644 --- a/plugins/jvm-example/example-fixture/build.gradle +++ b/test/fixtures/example-fixture/build.gradle @@ -17,4 +17,4 @@ * under the License. */ -apply plugin: 'java' +apply plugin: 'elasticsearch.build' diff --git a/plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java b/test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java similarity index 84% rename from plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java rename to test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java index e69bb97d7ca..603aba1fc63 100644 --- a/plugins/jvm-example/example-fixture/src/main/java/example/ExampleTestFixture.java +++ b/test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java @@ -19,6 +19,7 @@ package example; +import java.lang.management.ManagementFactory; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; @@ -37,20 +38,29 @@ import java.util.Collections; public class ExampleTestFixture { public static void main(String args[]) throws Exception { if (args.length != 1) { - throw new IllegalArgumentException("ExampleTestFixture "); + throw new IllegalArgumentException("ExampleTestFixture "); } + Path dir = Paths.get(args[0]); AsynchronousServerSocketChannel server = AsynchronousServerSocketChannel .open() .bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + + // write pid file + Path tmp = Files.createTempFile(dir, null, null); + String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; + Files.write(tmp, Collections.singleton(pid)); + Files.move(tmp, dir.resolve("pid"), StandardCopyOption.ATOMIC_MOVE); + + // write port file - Path tmp = Files.createTempFile(null, null); + tmp = Files.createTempFile(dir, null, null); InetSocketAddress bound = (InetSocketAddress) server.getLocalAddress(); if (bound.getAddress() instanceof Inet6Address) { Files.write(tmp, Collections.singleton("[" + bound.getHostString() + "]:" + bound.getPort())); } else { Files.write(tmp, Collections.singleton(bound.getHostString() + ":" + bound.getPort())); } - Files.move(tmp, Paths.get(args[0]), StandardCopyOption.ATOMIC_MOVE); + Files.move(tmp, dir.resolve("ports"), StandardCopyOption.ATOMIC_MOVE); + // go time server.accept(null, new
CompletionHandler<AsynchronousSocketChannel,Void>() { @Override @@ -66,6 +76,7 @@ public class ExampleTestFixture { @Override public void failed(Throwable exc, Void attachment) {} }); + + // wait forever, until you kill me Thread.sleep(Long.MAX_VALUE); } diff --git a/test/framework/build.gradle b/test/framework/build.gradle index a2c568f1d7f..1c44cca344c 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import org.elasticsearch.gradle.precommit.PrecommitTasks - -apply plugin: 'elasticsearch.build' dependencies { compile "org.elasticsearch:elasticsearch:${version}" @@ -36,15 +33,5 @@ dependencies { compileJava.options.compilerArgs << '-Xlint:-cast,-deprecation,-fallthrough,-overrides,-rawtypes,-serial,-try,-unchecked' compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' -// the main files are actually test files, so use the appopriate forbidden api sigs -forbiddenApisMain { - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] - signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/test-signatures.txt')] -} - -// TODO: should we have licenses for our test deps? -dependencyLicenses.enabled = false - -// we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!! thirdPartyAudit.missingClasses = true From 323111b715eb384337c1188d6ca52555127c1acf Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 20 Dec 2015 01:49:28 +0200 Subject: [PATCH 169/322] [DOC] simplify docs for repository-hdfs --- docs/plugins/repository-hdfs.asciidoc | 71 +++++---------------- 1 file changed, 11 insertions(+), 60 deletions(-) diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index 114dbf13035..53052604514 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -8,29 +8,25 @@ The HDFS repository plugin adds support for using HDFS File System as a reposito [float] ==== Installation -This plugin can be installed using the plugin manager using _one_ of the following packages: +This plugin can be installed through the plugin manager: [source,sh] ---------------------------------------------------------------- sudo bin/plugin install repository-hdfs -sudo bin/plugin install repository-hdfs-hadoop2 -sudo bin/plugin install repository-hdfs-lite ---------------------------------------------------------------- -The chosen plugin must be installed on every node in the cluster, and each node must +The plugin must be installed on _every_ node in the cluster, and each node must be restarted after installation. [[repository-hdfs-remove]] [float] ==== Removal -The plugin can be removed by specifying the _installed_ package using _one_ of the following commands: +The plugin can be removed by specifying the _installed_ package: [source,sh] ---------------------------------------------------------------- sudo bin/plugin remove repository-hdfs -sudo bin/plugin remove repository-hdfs-hadoop2 -sudo bin/plugin remove repository-hdfs-lite ---------------------------------------------------------------- The node must be stopped before removing the plugin. @@ -38,49 +34,15 @@ The node must be stopped before removing the plugin. [[repository-hdfs-usage]] ==== Getting started with HDFS -The HDFS snapshot/restore plugin comes in three _flavors_: +The HDFS snapshot/restore plugin is built against the latest Apache Hadoop 2.x (currently 2.7.1).
If the distro you are using is not protocol +compatible with Apache Hadoop, consider replacing the Hadoop libraries inside the plugin folder with your own (you might have to adjust the security permissions required). -* Default / Hadoop 1.x:: -The default version contains the plugin jar alongside Apache Hadoop 1.x (stable) dependencies. -* YARN / Hadoop 2.x:: -The `hadoop2` version contains the plugin jar plus the Apache Hadoop 2.x (also known as YARN) dependencies. -* Lite:: -The `lite` version contains just the plugin jar, without any Hadoop dependencies. The user should provide these (read below). +Even if Hadoop is already installed on the Elasticsearch nodes, for security reasons, the required libraries need to be placed under the plugin folder. +Note that in most cases, if the distro is compatible, one simply needs to configure the repository with the appropriate Hadoop configuration files (see below). -[[repository-hdfs-flavor]] -===== What version to use? - -It depends on whether Hadoop is locally installed or not and if not, whether it is compatible with Apache Hadoop clients. - -* Are you using Apache Hadoop (or a _compatible_ distro) and do not have installed on the Elasticsearch nodes?:: -+ -If the answer is yes, for Apache Hadoop 1 use the default `repository-hdfs` or `repository-hdfs-hadoop2` for Apache Hadoop 2. -+ -* If you are have Hadoop installed locally on the Elasticsearch nodes or are using a certain distro:: -+ -Use the `lite` version and place your Hadoop _client_ jars and their dependencies in the plugin folder under `hadoop-libs`. -For large deployments, it is recommended to package the libraries in the plugin zip and deploy it manually across nodes -(and thus avoiding having to do the libraries setup on each node). - -[[repository-hdfs-security]] -==== Handling JVM Security and Permissions - -Out of the box, Elasticsearch runs in a JVM with the security manager turned _on_ to make sure that unsafe or sensitive actions -are allowed only from trusted code. Hadoop however is not really designed to run under one; it does not rely on privileged blocks -to execute sensitive code, of which it uses plenty. - -The `repository-hdfs` plugin provides the necessary permissions for both Apache Hadoop 1.x and 2.x (latest versions) to successfully -run in a secured JVM as one can tell from the number of permissions required when installing the plugin. -However using a certain Hadoop File-System (outside DFS), a certain distro or operating system (in particular Windows), might require -additional permissions which are not provided by the plugin. - -In this case there are several workarounds: -* add the permission into `plugin-security.policy` (available in the plugin folder) - -* disable the security manager through `es.security.manager.enabled=false` configurations setting - NOT RECOMMENDED - -If you find yourself in such a situation, please let us know what Hadoop distro version and OS you are using and what permission is missing -by raising an issue. Thank you! +Windows Users:: +Using Apache Hadoop on Windows is problematic and thus it is not recommended. For those _really_ wanting to use it, make sure you place the elusive `winutils.exe` under the +plugin folder and point the `HADOOP_HOME` variable to it; this should minimize the amount of permissions Hadoop requires (though one would still have to add some more).
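A minimal sketch of that Windows setup, for illustration only and not part of the patch: the `%ES_HOME%` location and the `bin` subfolder are assumptions, based on Hadoop's convention of resolving `%HADOOP_HOME%\bin\winutils.exe`.

[source,sh]
----
REM hypothetical paths; adjust ES_HOME to the actual Elasticsearch install
mkdir "%ES_HOME%\plugins\repository-hdfs\bin"
copy winutils.exe "%ES_HOME%\plugins\repository-hdfs\bin\"
REM point HADOOP_HOME at the plugin folder, as the note above suggests
set HADOOP_HOME=%ES_HOME%\plugins\repository-hdfs
----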
[[repository-hdfs-config]] ==== Configuration Properties @@ -104,15 +66,4 @@ repositories ---- NOTE: Be careful when including a paths within the `uri` setting; Some implementations ignore them completely while -others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead. - -[[repository-hdfs-other-fs]] -==== Plugging other file-systems - -Any HDFS-compatible file-systems (like Amazon `s3://` or Google `gs://`) can be used as long as the proper Hadoop -configuration is passed to the Elasticsearch plugin. In practice, this means making sure the correct Hadoop configuration -files (`core-site.xml` and `hdfs-site.xml`) and its jars are available in plugin classpath, just as you would with any -other Hadoop client or job. - -Otherwise, the plugin will only read the _default_, vanilla configuration of Hadoop and will not be able to recognized -the plugged-in file-system. +others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead. \ No newline at end of file From 3204e872201efa343c0a9081f50762ea239df0fa Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 20 Dec 2015 15:53:18 +0200 Subject: [PATCH 170/322] Restrict usage to HDFS only --- docs/plugins/repository-hdfs.asciidoc | 10 +-- .../plugin/hadoop/hdfs/HdfsPlugin.java | 1 - .../repositories/hdfs/HdfsBlobStore.java | 4 +- .../repositories/hdfs/HdfsRepository.java | 69 +++++++++++-------- .../resources/hadoop-libs/README.asciidoc | 1 - .../plugin/hadoop/hdfs/HdfsTests.java | 66 ++++++++++++++---- 6 files changed, 100 insertions(+), 51 deletions(-) delete mode 100644 plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index 53052604514..28abaf78f66 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -37,8 +37,7 @@ The node must be stopped before removing the plugin. The HDFS snapshot/restore plugin is built against the latest Apache Hadoop 2.x (currently 2.7.1). If the distro you are using is not protocol compatible with Apache Hadoop, consider replacing the Hadoop libraries inside the plugin folder with your own (you might have to adjust the security permissions required). -Even if Hadoop is already installed on the Elasticsearch nodes, for security reasons, the required libraries need to be placed under the plugin folder. -Note that in most cases, if the distro is compatible, one simply needs to configure the repository with the appropriate Hadoop configuration files (see below). +Even if Hadoop is already installed on the Elasticsearch nodes, for security reasons, the required libraries need to be placed under the plugin folder. Note that in most cases, if the distro is compatible, one simply needs to configure the repository with the appropriate Hadoop configuration files (see below). Windows Users:: Using Apache Hadoop on Windows is problematic and thus it is not recommended. 
For those _really_ wanting to use it, make sure you place the elusive `winutils.exe` under the
@@ -54,8 +53,8 @@ Once installed, define the configuration for the `hdfs` repository through `elasticsearch.yml`
 ----
 repositories
     hdfs:
-        uri: "hdfs://<host>:<port>/"    # optional - Hadoop file-system URI
-        path: "some/path"               # required - path with the file-system where data is stored/loaded
+        uri: "hdfs://<host>:<port>/"    # required - HDFS address only
+        path: "some/path"               # required - path within the file-system where data is stored/loaded
         load_defaults: "true"           # optional - whether to load the default Hadoop configuration (default) or not
         conf_location: "extra-cfg.xml"  # optional - Hadoop configuration XML to be loaded (use commas for multi values)
         conf.<key> : "<value>"          # optional - 'inlined' key=value added to the Hadoop configuration
@@ -64,6 +63,3 @@ repositories
         chunk_size: "10mb"              # optional - chunk size (disabled by default)
 ----
-
-NOTE: Be careful when including a paths within the `uri` setting; Some implementations ignore them completely while
-others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead.
\ No newline at end of file
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
index ba6d840c181..a14ed793e1e 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
@@ -62,7 +62,6 @@ public class HdfsPlugin extends Plugin {
         return "HDFS Repository Plugin";
     }
 
-    @SuppressWarnings("unchecked")
     public void onModule(RepositoriesModule repositoriesModule) {
         repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class);
     }
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java
index 9c6dac7b68a..815c0d1eff3 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java
@@ -40,9 +40,9 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore {
     private final ThreadPool threadPool;
     private final int bufferSizeInBytes;
 
-    public HdfsBlobStore(Settings settings, FileContextFactory ffs, Path path, ThreadPool threadPool) throws IOException {
+    public HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path, ThreadPool threadPool) throws IOException {
         super(settings);
-        this.fcf = ffs;
+        this.fcf = fcf;
         this.rootHdfsPath = path;
         this.threadPool = threadPool;
 
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
index b00c72bad8d..75aac89b987 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
@@ -18,27 +18,6 @@
  */
 package org.elasticsearch.repositories.hdfs;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.AbstractFileSystem;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
-import
org.apache.hadoop.security.UserGroupInformation; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; -import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.threadpool.ThreadPool; - import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; @@ -51,6 +30,26 @@ import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.AbstractFileSystem; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.snapshots.IndexShardRepository; +import org.elasticsearch.repositories.RepositoryName; +import org.elasticsearch.repositories.RepositorySettings; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.threadpool.ThreadPool; + public class HdfsRepository extends BlobStoreRepository implements FileContextFactory { public final static String TYPE = "hdfs"; @@ -61,6 +60,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa private final RepositorySettings repositorySettings; private final ThreadPool threadPool; private final String path; + private final String uri; private FileContext fc; private HdfsBlobStore blobStore; @@ -71,6 +71,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa this.repositorySettings = repositorySettings; this.threadPool = threadPool; + uri = repositorySettings.settings().get("uri", settings.get("uri")); path = repositorySettings.settings().get("path", settings.get("path")); @@ -81,9 +82,25 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa @Override protected void doStart() { + if (!Strings.hasText(uri)) { + throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); + } + + URI actualUri = URI.create(uri); + String scheme = actualUri.getScheme(); + if (!Strings.hasText(scheme) || !scheme.toLowerCase(Locale.ROOT).equals("hdfs")) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", scheme, uri)); + } + String p = actualUri.getPath(); + if (Strings.hasText(p) && !p.equals("/")) { + throw new IllegalArgumentException(String.format(Locale.ROOT, + "Use 'path' option to specify a path [%s], 
not the uri [%s] for hdfs snapshot/restore", p, uri)); + } + // get configuration if (path == null) { - throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore"); + throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); } try { fc = getFileContext(); @@ -186,13 +203,10 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot initialize Hadoop"), th); } - String uri = repositorySettings.settings().get("uri", settings.get("uri")); - URI actualUri = (uri != null ? URI.create(uri) : null); - + URI actualUri = URI.create(uri); try { // disable FS cache - String disableFsCache = String.format(Locale.ROOT, "fs.%s.impl.disable.cache", actualUri.getScheme()); - cfg.setBoolean(disableFsCache, true); + cfg.setBoolean("fs.hdfs.impl.disable.cache", true); // create the AFS manually since through FileContext is relies on Subject.doAs for no reason at all AbstractFileSystem fs = AbstractFileSystem.get(actualUri, cfg); @@ -202,7 +216,6 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa } } - @SuppressForbidden(reason = "pick up Hadoop config (which can be on HDFS)") private void addConfigLocation(Configuration cfg, String confLocation) { URL cfgURL = null; // it's an URL diff --git a/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc b/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc deleted file mode 100644 index e9f85f3cdf7..00000000000 --- a/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc +++ /dev/null @@ -1 +0,0 @@ -Folder containing the required Hadoop client libraries and dependencies. \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java index 9728c58203d..d7c1a37c03d 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java @@ -18,6 +18,11 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +import java.util.Collection; + import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; @@ -26,17 +31,11 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import java.util.Collection; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; - @ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0) public class HdfsTests extends ESIntegTestCase { @@ -51,8 +50,8 @@ public class HdfsTests extends ESIntegTestCase { PutRepositoryResponse putRepositoryResponse = 
client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", "file:///") - .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) + .put("uri", "hdfs:///") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) .put("path", "foo") .put("conf", "additional-cfg.xml, conf-2.xml") .put("chunk_size", randomIntBetween(100, 1000) + "k") @@ -121,7 +120,6 @@ public class HdfsTests extends ESIntegTestCase { assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); } - // RepositoryVerificationException.class public void testWrongPath() { Client client = client(); @@ -129,8 +127,8 @@ public class HdfsTests extends ESIntegTestCase { PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") .setType("hdfs") .setSettings(Settings.settingsBuilder() - .put("uri", "file:///") - .put("conf.fs.AbstractFileSystem.file.impl", TestingFs.class.getName()) + .put("uri", "hdfs:///") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) .put("path", "a@b$c#11:22") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean())) @@ -144,8 +142,52 @@ public class HdfsTests extends ESIntegTestCase { // expected } } + + public void testNonHdfsUri() { + Client client = client(); + try { + PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.settingsBuilder() + .put("uri", "file:///") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) + .put("path", "should-fail") + .put("chunk_size", randomIntBetween(100, 1000) + "k") + .put("compress", randomBoolean())) + .get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + + createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + ensureGreen(); + fail("Path name is invalid"); + } catch (RepositoryException re) { + // expected + } + } + + public void testPathSpecifiedInHdfs() { + Client client = client(); + try { + PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.settingsBuilder() + .put("uri", "hdfs:///some/path") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) + .put("path", "should-fail") + .put("chunk_size", randomIntBetween(100, 1000) + "k") + .put("compress", randomBoolean())) + .get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + + createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + ensureGreen(); + fail("Path name is invalid"); + } catch (RepositoryException re) { + // expected + } + } private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} +} \ No newline at end of file From a9e9dc7d880e407deef7b35e0bd2e25ee97687bb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sun, 20 Dec 2015 15:13:14 +0100 Subject: [PATCH 171/322] Disable tests for example-fixture --- test/fixtures/example-fixture/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/test/fixtures/example-fixture/build.gradle b/test/fixtures/example-fixture/build.gradle index 4c94aa93be4..17a4586a54d 100644 --- a/test/fixtures/example-fixture/build.gradle +++ b/test/fixtures/example-fixture/build.gradle @@ -18,3 +18,4 @@ */ apply plugin: 'elasticsearch.build' +test.enabled = false From 12a8428dfbbd47b268b7c4b89d568c7558584671 Mon 
Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 16:00:37 -0500 Subject: [PATCH 172/322] Add MiniHDFS test fixture, started before integTest and shut down after. Currently uses a hardcoded port (9999), need to apply MavenFilteringHack after it starts. --- .../elasticsearch/gradle/BuildPlugin.groovy | 8 ++ plugins/repository-hdfs/build.gradle | 18 ++++ .../test/hdfs_repository/20_repository.yaml | 2 +- .../test/hdfs_repository/30_snapshot.yaml | 2 +- .../test/hdfs_repository/40_restore.yaml | 2 +- settings.gradle | 1 + test/fixtures/hdfs-fixture/build.gradle | 39 +++++++++ .../src/main/java/hdfs/MiniHDFS.java | 86 +++++++++++++++++++ 8 files changed, 155 insertions(+), 3 deletions(-) create mode 100644 test/fixtures/hdfs-fixture/build.gradle create mode 100644 test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index eef79bba737..b8fd793ef23 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -198,6 +198,10 @@ class BuildPlugin implements Plugin { * to iterate the transitive dependencies and add excludes. */ static void configureConfigurations(Project project) { + // we are not shipping these jars, we act like dumb consumers of these things + if (project.path.startsWith(':test:fixtures')) { + return + } // fail on any conflicting dependency versions project.configurations.all({ Configuration configuration -> if (configuration.name.startsWith('_transitive_')) { @@ -205,6 +209,10 @@ class BuildPlugin implements Plugin { // we just have them to find *what* transitive deps exist return } + if (configuration.name.endsWith('Fixture')) { + // just a self contained test-fixture configuration, likely transitive and hellacious + return + } configuration.resolutionStrategy.failOnVersionConflict() }) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index dfceeec14e0..e35a2dc8052 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -28,6 +28,10 @@ versions << [ 'hadoop2': '2.7.1' ] +configurations { + hdfsFixture +} + dependencies { compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}" compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" @@ -44,12 +48,26 @@ dependencies { compile 'commons-lang:commons-lang:2.6' compile 'javax.servlet:servlet-api:2.5' // we need this one, its not really 'provided' compile 'org.slf4j:slf4j-api:${versions.slf4j}' + + hdfsFixture project(':test:fixtures:hdfs-fixture') } dependencyLicenses { mapping from: /hadoop-.*/, to: 'hadoop' } +task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) { + dependsOn project.configurations.hdfsFixture + executable = new File(project.javaHome, 'bin/java') + args '-cp', "${ -> project.configurations.hdfsFixture.asPath }", + 'hdfs.MiniHDFS', + baseDir +} + +integTest { + dependsOn hdfsFixture +} + compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' thirdPartyAudit.missingClasses = true diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml index 92fa3aba44c..e4e004c396e 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml @@ -9,7 +9,7 @@ body: type: hdfs settings: - uri: "hdfs://localhost:40737" + uri: "hdfs://localhost:9999" path: "foo/bar" # Get repository diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index 38aed590f98..e56dc88f5e9 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -11,7 +11,7 @@ setup: body: type: hdfs settings: - uri: "hdfs://localhost:40737" + uri: "hdfs://localhost:9999" path: "foo/bar" - do: diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index b1a8f78b699..a73219fcd26 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -11,7 +11,7 @@ setup: body: type: hdfs settings: - uri: "hdfs://localhost:40737" + uri: "hdfs://localhost:9999" path: "foo/bar" - do: diff --git a/settings.gradle b/settings.gradle index e2c63dcfed5..55126b3c808 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,6 +10,7 @@ List projects = [ 'distribution:rpm', 'test:framework', 'test:fixtures:example-fixture', + 'test:fixtures:hdfs-fixture', 'modules:lang-expression', 'modules:lang-groovy', 'modules:lang-mustache', diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle new file mode 100644 index 00000000000..a5dd2fa37cf --- /dev/null +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'elasticsearch.build' + +versions << [ + 'hadoop2': '2.7.1' +] + +// we create MiniHdfsCluster which needs hadoop-hdfs and hadoop-common test jars. +// tests jars don't bring in their dependencies, so we need their normal jars too. 
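+// (MiniDFSCluster itself lives in the hadoop-hdfs `tests` artifact; the regular jars supply the classes it runs against)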
+dependencies { + compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}:tests" + compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}" + + compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}:tests" + compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" +} + +// just a test fixture: we aren't using jars in releases +thirdPartyAudit.enabled = false +// TODO: add a simple HDFS client test for this fixture +test.enabled = false diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java new file mode 100644 index 00000000000..76be664c1da --- /dev/null +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package hdfs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.MiniDFSCluster; + +import org.apache.log4j.BasicConfigurator; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Locale; + +import java.lang.management.ManagementFactory; + +/** + * MiniHDFS test fixture. There is a CLI tool, but here we can + * easily properly setup logging, avoid parsing JSON, etc. + */ +public class MiniHDFS { + + private static String PORT_FILE_NAME = "ports"; + private static String PID_FILE_NAME = "pid"; + + public static void main(String[] args) throws Exception { + if (args.length != 1) { + throw new IllegalArgumentException("MiniHDFS "); + } + + // configure logging, so we see all HDFS server logs if something goes wrong + BasicConfigurator.configure(); + + // configure Paths + Path baseDir = Paths.get(args[0]); + // hadoop-home/, so logs will not complain + Path hadoopHome = baseDir.resolve("hadoop-home"); + Files.createDirectories(hadoopHome); + System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + // hdfs-data/, where any data is going + Path hdfsHome = baseDir.resolve("hdfs-data"); + + // start cluster + Configuration cfg = new Configuration(); + cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); + // lower default permission: TODO: needed? + cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); + // TODO: remove hardcoded port! 
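+        // note: the packaged REST tests (rest-api-spec/test/hdfs_repository/*.yaml) currently
+        // assume hdfs://localhost:9999, so the port chosen below must stay in sync with them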
+ MiniDFSCluster dfs = new MiniDFSCluster.Builder(cfg).nameNodePort(9999).build(); + + // write our PID file + Path tmp = Files.createTempFile(baseDir, null, null); + String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; + Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); + Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); + + // write our port file + tmp = Files.createTempFile(baseDir, null, null); + Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8)); + Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); + + // don't rely on hadoop thread leaks, wait forever, until you kill me + Thread.sleep(Long.MAX_VALUE); + } +} From 2347e3c3730a6f35fe7767b9c86cff2da9e61950 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 16:17:17 -0500 Subject: [PATCH 173/322] Get forbidden apis passing again, this needs to be investigated --- .../org/elasticsearch/repositories/hdfs/HdfsRepository.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 75aac89b987..7fbbcdac2ac 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -39,6 +39,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; @@ -216,6 +217,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa } } + @SuppressForbidden(reason = "Where is this reading configuration files from? It should use Environment for ES conf dir") private void addConfigLocation(Configuration cfg, String confLocation) { URL cfgURL = null; // it's an URL @@ -283,4 +285,4 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa // to handle it? 
fc = null; } -} \ No newline at end of file +} From ee546ff6556990cb4b98cc94eb509f11ae39ed0d Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 17:10:01 -0500 Subject: [PATCH 174/322] try to get windows working --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 55 +++++++++++++------ 1 file changed, 38 insertions(+), 17 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index a14ed793e1e..dd8f817fec3 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -18,10 +18,14 @@ */ package org.elasticsearch.plugin.hadoop.hdfs; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; @@ -33,23 +37,40 @@ public class HdfsPlugin extends Plugin { // initialize some problematic classes with elevated privileges static { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - try { - Class.forName("org.apache.hadoop.security.UserGroupInformation"); - Class.forName("org.apache.hadoop.util.StringUtils"); - Class.forName("org.apache.hadoop.util.ShutdownHookManager"); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - return null; - } - }); + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Void run() { + return evilHadoopInit(); + } + }); + } + + @SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed") + private static Void evilHadoopInit() { + String oldValue = null; + try { + // hack: on Windows, Shell's cinit has a similar problem that on unix, + // but here we can workaround it for now by setting hadoop home + // TODO: remove THIS when hadoop is fixed + Path hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); + oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); + Class.forName("org.apache.hadoop.security.UserGroupInformation"); + Class.forName("org.apache.hadoop.util.StringUtils"); + Class.forName("org.apache.hadoop.util.ShutdownHookManager"); + } catch (ClassNotFoundException | IOException e) { + throw new RuntimeException(e); + } finally { + if (oldValue == null) { + System.clearProperty("hadoop.home.dir"); + } else { + System.setProperty("hadoop.home.dir", oldValue); + } + } + return null; } @Override From a37417085d79e2295acb081d996005f1c99f0feb Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 18:31:55 -0500 Subject: [PATCH 175/322] blind stab at unit test issues on windows --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 2 +- .../plugin/hadoop/hdfs/TestingFs.java | 85 ++++++++++++++++--- 2 files changed, 72 insertions(+), 15 deletions(-) diff --git 
a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index dd8f817fec3..8981fd02b58 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -53,7 +53,7 @@ public class HdfsPlugin extends Plugin { private static Void evilHadoopInit() { String oldValue = null; try { - // hack: on Windows, Shell's cinit has a similar problem that on unix, + // hack: on Windows, Shell's clinit has a similar problem that on unix, // but here we can workaround it for now by setting hadoop home // TODO: remove THIS when hadoop is fixed Path hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java index 548ac75af72..d39afe9cb74 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java @@ -21,14 +21,20 @@ package org.elasticsearch.plugin.hadoop.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DelegateToFileSystem; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.lucene.util.LuceneTestCase; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.spi.FileSystemProvider; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; /** * Extends LFS to improve some operations to keep the security permissions at @@ -36,25 +42,76 @@ import java.net.URISyntaxException; */ public class TestingFs extends DelegateToFileSystem { - private static class ImprovedRawLocalFileSystem extends RawLocalFileSystem { - @Override - public Path getInitialWorkingDirectory() { - // sets working dir to a tmp dir for testing - return new Path(LuceneTestCase.createTempDir().toString()); - } - - @Override - public void setPermission(Path p, FsPermission permission) { - // no execution, thank you very much! - } + // wrap hadoop rawlocalfilesystem to behave less crazy + static RawLocalFileSystem wrap(final Path base) { + final FileSystemProvider baseProvider = base.getFileSystem().provider(); + return new RawLocalFileSystem() { + + private org.apache.hadoop.fs.Path box(Path path) { + return new org.apache.hadoop.fs.Path(path.toUri()); + } + + private Path unbox(org.apache.hadoop.fs.Path path) { + return baseProvider.getPath(path.toUri()); + } + + @Override + protected org.apache.hadoop.fs.Path getInitialWorkingDirectory() { + return box(base); + } + + @Override + public void setPermission(org.apache.hadoop.fs.Path path, FsPermission permission) { + // no execution, thank you very much! + } + + // pretend we don't support symlinks (which causes hadoop to want to do crazy things), + // returning the boolean does not seem to really help, link-related operations are still called. 
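+            // the overrides that follow therefore answer every link-related query as if the
+            // path were a regular file or directory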
+ + @Override + public boolean supportsSymlinks() { + return false; + } + + @Override + public FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path path) throws IOException { + return getFileStatus(path); + } + + @Override + public org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path path) throws IOException { + return path; + } + + @Override + public FileStatus getFileStatus(org.apache.hadoop.fs.Path path) throws IOException { + BasicFileAttributes attributes; + try { + attributes = Files.readAttributes(unbox(path), BasicFileAttributes.class); + } catch (NoSuchFileException e) { + // unfortunately, specific exceptions are not guaranteed. don't wrap hadoop over a zip filesystem or something. + FileNotFoundException fnfe = new FileNotFoundException("File " + path + " does not exist"); + fnfe.initCause(e); + throw fnfe; + } + + // we set similar values to raw local filesystem, except we are never a symlink + long length = attributes.size(); + boolean isDir = attributes.isDirectory(); + int blockReplication = 1; + long blockSize = getDefaultBlockSize(path); + long modificationTime = attributes.creationTime().toMillis(); + return new FileStatus(length, isDir, blockReplication, blockSize, modificationTime, path); + } + }; } public TestingFs(URI uri, Configuration configuration) throws URISyntaxException, IOException { - super(URI.create("file:///"), new ImprovedRawLocalFileSystem(), configuration, "file", false); + super(URI.create("file:///"), wrap(LuceneTestCase.createTempDir()), configuration, "file", false); } @Override - public void checkPath(Path path) { + public void checkPath(org.apache.hadoop.fs.Path path) { // we do evil stuff, we admit it. } } From 08d1d2f1924dc6cdd8e41abac0f7e851900c1051 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 19:04:28 -0500 Subject: [PATCH 176/322] add simple javaexec for now until test fixtures can be debugged when they do not start --- test/fixtures/hdfs-fixture/build.gradle | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index a5dd2fa37cf..d9db99db2a8 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -33,6 +33,14 @@ dependencies { compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" } +// for testing, until fixtures are actually debuggable. +// gradle hides *EVERYTHING* so you have no clue what went wrong. +task hdfs(type: JavaExec) { + classpath = sourceSets.test.compileClasspath + sourceSets.test.output + main = "hdfs.MiniHDFS" + args = [ 'build/fixtures/hdfsFixture' ] +} + // just a test fixture: we aren't using jars in releases thirdPartyAudit.enabled = false // TODO: add a simple HDFS client test for this fixture From 03a2b6b01bca8516503228ac0cbe69091e3f7023 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 16:30:19 -0800 Subject: [PATCH 177/322] Disable HDFS fixture on windows, it requires native libraries. 
--- plugins/repository-hdfs/build.gradle | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index e35a2dc8052..e9e93b46d19 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -65,7 +65,17 @@ task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) { } integTest { - dependsOn hdfsFixture + // hdfs fixture will not start without hadoop native libraries on windows + // so we can't run integration tests against external hadoop here. + if (System.getProperty("os.name").startsWith("Windows")) { + systemProperty 'tests.rest.blacklist', [ + 'hdfs_repository/20_repository/*', + 'hdfs_repository/30_snapshot/*', + 'hdfs_repository/40_restore/*' + ].join(',') + } else { + dependsOn hdfsFixture + } } compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' From a518599e27047549347ac58d66d66b7bfd27f988 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sun, 20 Dec 2015 17:57:42 -0800 Subject: [PATCH 178/322] Allow plugins to upgrade slf4j-api to a compile dep This was originally intended to be general purpose in #15555, but that still had problems. Instead, this change fixes the issue explicitly for slf4j-api, since that is the problematic dep that is not actually included in the distributions. --- .../elasticsearch/gradle/plugin/PluginBuildPlugin.groovy | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index db3f3eca50a..042e8d22529 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -22,6 +22,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project +import org.gradle.api.artifacts.Dependency import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -100,6 +101,11 @@ public class PluginBuildPlugin extends BuildPlugin { from pluginMetadata // metadata (eg custom security policy) from project.jar // this plugin's jar from project.configurations.runtime - project.configurations.provided // the dep jars + // hack just for slf4j, in case it is "upgrade" from provided to compile, + // since it is not actually provided in distributions + from project.configurations.runtime.fileCollection { Dependency dep -> + return dep.name == 'slf4j-api' && project.configurations.compile.dependencies.contains(dep) + } // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { From 04966bcc3e5b5ce5f397b69d8aeabaea4d626554 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 21:02:03 -0500 Subject: [PATCH 179/322] contain and improve hack --- .../plugin/hadoop/hdfs/HdfsPlugin.java | 31 +++++++++++++------ 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 8981fd02b58..07680e48a1a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -24,6 +24,7 @@ import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; +import org.apache.lucene.util.Constants; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; @@ -51,23 +52,35 @@ public class HdfsPlugin extends Plugin { @SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed") private static Void evilHadoopInit() { + // hack: on Windows, Shell's clinit has a similar problem that on unix, + // but here we can workaround it for now by setting hadoop home + // TODO: remove THIS when hadoop is fixed + Path hadoopHome = null; String oldValue = null; try { - // hack: on Windows, Shell's clinit has a similar problem that on unix, - // but here we can workaround it for now by setting hadoop home - // TODO: remove THIS when hadoop is fixed - Path hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); - oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); + if (Constants.WINDOWS) { + hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); + oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); + } Class.forName("org.apache.hadoop.security.UserGroupInformation"); Class.forName("org.apache.hadoop.util.StringUtils"); Class.forName("org.apache.hadoop.util.ShutdownHookManager"); } catch (ClassNotFoundException | IOException e) { throw new RuntimeException(e); } finally { - if (oldValue == null) { - System.clearProperty("hadoop.home.dir"); - } else { - System.setProperty("hadoop.home.dir", oldValue); + // try to clean up the hack + if (Constants.WINDOWS) { + if (oldValue == null) { + System.clearProperty("hadoop.home.dir"); + } else { + System.setProperty("hadoop.home.dir", oldValue); + } + try { + // try to clean up our temp dir too if we can + if (hadoopHome != null) { + Files.delete(hadoopHome); + } + } catch (IOException thisIsBestEffort) {} } } return null; From 935c2c75f67d91e5bf11ea96778f20f7366c6413 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 22:08:18 -0500 Subject: [PATCH 180/322] Remove slf4j hack --- distribution/build.gradle | 2 +- plugins/repository-hdfs/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 03b30848282..fcf22ff48dc 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -39,7 +39,7 @@ buildscript { } // this is common configuration for distributions, but we also add it here for the license check to use -ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive() // .exclude(module: 'slf4j-api') +ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api') /***************************************************************************** diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index e9e93b46d19..4bade1850f0 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -47,7 +47,7 @@ dependencies { compile 'commons-io:commons-io:2.4' compile 'commons-lang:commons-lang:2.6' compile 'javax.servlet:servlet-api:2.5' - // we need this one, its not really 'provided' compile 'org.slf4j:slf4j-api:${versions.slf4j}' + compile 
"org.slf4j:slf4j-api:${versions.slf4j}" hdfsFixture project(':test:fixtures:hdfs-fixture') } From 53530f1243e836594ef3b717d3924621a617f1a8 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 22:23:30 -0500 Subject: [PATCH 181/322] remove hacks, test fixtures are clean before each execution --- .../rest-api-spec/test/hdfs_repository/30_snapshot.yaml | 5 ----- .../rest-api-spec/test/hdfs_repository/40_restore.yaml | 5 ----- 2 files changed, 10 deletions(-) diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index e56dc88f5e9..a7af512666c 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -40,8 +40,3 @@ setup: - match: { snapshot.shards.successful: 1 } - match: { snapshot.shards.failed : 0 } - # ghetto teardown, so we can debug this thing with external hdfs - - do: - snapshot.delete: - repository: test_repo_hdfs_snapshot - snapshot: test_snapshot diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index a73219fcd26..2327b5da906 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -63,8 +63,3 @@ setup: - match: { test_index.shards.0.index.files.reused: 0} - match: { test_index.shards.0.index.size.reused_in_bytes: 0} - # ghetto teardown, so we can debug this thing with external hdfs - - do: - snapshot.delete: - repository: test_repo_hdfs_restore - snapshot: test_restore From 99f2cde22585b3079ff60bd0a9e10b3643272c90 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 22:30:41 -0500 Subject: [PATCH 182/322] Fail fast if HDFS cluster shuts itself down --- test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java index 76be664c1da..6ad4a970da7 100644 --- a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -79,8 +79,5 @@ public class MiniHDFS { tmp = Files.createTempFile(baseDir, null, null); Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8)); Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - - // don't rely on hadoop thread leaks, wait forever, until you kill me - Thread.sleep(Long.MAX_VALUE); } } From e93c491dbec7d423ffd7bc0f38356dfc074fab52 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 20 Dec 2015 23:50:27 -0500 Subject: [PATCH 183/322] simplify hdfs fixture --- test/fixtures/hdfs-fixture/build.gradle | 9 ++------- .../hdfs-fixture/src/main/java/hdfs/MiniHDFS.java | 6 ------ 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index d9db99db2a8..3d63939f66e 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -23,14 +23,9 @@ versions << [ 'hadoop2': '2.7.1' ] -// we create MiniHdfsCluster 
which needs hadoop-hdfs and hadoop-common test jars. -// tests jars don't bring in their dependencies, so we need their normal jars too. +// we create MiniHdfsCluster with the hadoop artifact dependencies { - compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}:tests" - compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}" - - compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}:tests" - compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-minicluster:${versions.hadoop2}" } // for testing, until fixtures are actually debuggable. diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java index 6ad4a970da7..f57d389cc90 100644 --- a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -23,8 +23,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.log4j.BasicConfigurator; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -48,10 +46,6 @@ public class MiniHDFS { if (args.length != 1) { throw new IllegalArgumentException("MiniHDFS "); } - - // configure logging, so we see all HDFS server logs if something goes wrong - BasicConfigurator.configure(); - // configure Paths Path baseDir = Paths.get(args[0]); // hadoop-home/, so logs will not complain From f67390e0c8ed75faee0c45c392b116fae694710d Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 02:21:53 -0500 Subject: [PATCH 184/322] in the plugin: guard against HADOOP_HOME in environment on any platform. hdfs fixture: minihdfs works on windows now, if things are properly set but our test fixture still cannot launch this on windows. --- plugins/repository-hdfs/build.gradle | 33 ++++++++++++++++--- .../plugin/hadoop/hdfs/HdfsPlugin.java | 30 ++++++++--------- .../src/main/java/hdfs/MiniHDFS.java | 8 +++-- 3 files changed, 47 insertions(+), 24 deletions(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 4bade1850f0..f193fa2aba1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -18,6 +18,11 @@ */ //apply plugin: 'nebula.provided-base' + +import org.apache.tools.ant.taskdefs.condition.Os +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths esplugin { description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' @@ -65,16 +70,34 @@ task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) { } integTest { - // hdfs fixture will not start without hadoop native libraries on windows - // so we can't run integration tests against external hadoop here. 
- if (System.getProperty("os.name").startsWith("Windows")) { + boolean fixtureSupported = false; + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + // hdfs fixture will not start without hadoop native libraries on windows + String nativePath = System.getenv("HADOOP_HOME") + if (nativePath != null) { + Path path = Paths.get(nativePath); + if (Files.isDirectory(path) && + Files.exists(path.resolve("bin").resolve("winutils.exe")) && + Files.exists(path.resolve("bin").resolve("hadoop.dll")) && + Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { + fixtureSupported = true + } else { + throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin"); + } + } + } else { + fixtureSupported = true + } + + if (fixtureSupported) { + dependsOn hdfsFixture + } else { + logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") systemProperty 'tests.rest.blacklist', [ 'hdfs_repository/20_repository/*', 'hdfs_repository/30_snapshot/*', 'hdfs_repository/40_restore/*' ].join(',') - } else { - dependsOn hdfsFixture } } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java index 07680e48a1a..87f4f6024d7 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java @@ -54,14 +54,14 @@ public class HdfsPlugin extends Plugin { private static Void evilHadoopInit() { // hack: on Windows, Shell's clinit has a similar problem that on unix, // but here we can workaround it for now by setting hadoop home + // on unix: we still want to set this to something we control, because + // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom // TODO: remove THIS when hadoop is fixed Path hadoopHome = null; String oldValue = null; try { - if (Constants.WINDOWS) { - hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); - oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); - } + hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); + oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); Class.forName("org.apache.hadoop.security.UserGroupInformation"); Class.forName("org.apache.hadoop.util.StringUtils"); Class.forName("org.apache.hadoop.util.ShutdownHookManager"); @@ -69,19 +69,17 @@ public class HdfsPlugin extends Plugin { throw new RuntimeException(e); } finally { // try to clean up the hack - if (Constants.WINDOWS) { - if (oldValue == null) { - System.clearProperty("hadoop.home.dir"); - } else { - System.setProperty("hadoop.home.dir", oldValue); - } - try { - // try to clean up our temp dir too if we can - if (hadoopHome != null) { - Files.delete(hadoopHome); - } - } catch (IOException thisIsBestEffort) {} + if (oldValue == null) { + System.clearProperty("hadoop.home.dir"); + } else { + System.setProperty("hadoop.home.dir", oldValue); } + try { + // try to clean up our temp dir too if we can + if (hadoopHome != null) { + Files.delete(hadoopHome); + } + } catch (IOException thisIsBestEffort) {} } return null; } diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java index f57d389cc90..a4bf47f8eae 100644 --- 
a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -49,9 +49,11 @@ public class MiniHDFS { // configure Paths Path baseDir = Paths.get(args[0]); // hadoop-home/, so logs will not complain - Path hadoopHome = baseDir.resolve("hadoop-home"); - Files.createDirectories(hadoopHome); - System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + if (System.getenv("HADOOP_HOME") == null) { + Path hadoopHome = baseDir.resolve("hadoop-home"); + Files.createDirectories(hadoopHome); + System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + } // hdfs-data/, where any data is going Path hdfsHome = baseDir.resolve("hdfs-data"); From 776e5d8096c7ddea8996d8c70165f6971aadd776 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 8 Dec 2015 18:44:36 +0100 Subject: [PATCH 185/322] Separates routing and parent in all documentrequest in order to be able to distinguish an explicit routing value from a parent routing. Resolves conflicts between parent routing and alias routing with the following rule: * The parent routing is ignored if there is an alias routing that matches the request. Closes #3068 --- .../elasticsearch/action/DocumentRequest.java | 8 +++ .../action/bulk/TransportBulkAction.java | 2 +- .../action/delete/DeleteRequest.java | 20 ++++-- .../action/delete/TransportDeleteAction.java | 2 +- .../elasticsearch/action/get/GetRequest.java | 18 +++-- .../action/get/MultiGetRequest.java | 16 ++++- .../action/get/TransportGetAction.java | 2 +- .../action/get/TransportMultiGetAction.java | 2 +- .../action/index/IndexRequest.java | 8 +-- .../termvectors/TermVectorsRequest.java | 20 ++++-- .../TransportMultiTermVectorsAction.java | 6 +- .../TransportTermVectorsAction.java | 4 +- .../action/update/TransportUpdateAction.java | 2 +- .../action/update/UpdateRequest.java | 5 +- .../cluster/metadata/MetaData.java | 20 ++++-- .../termvectors/TermVectorsUnitTests.java | 2 +- .../cluster/metadata/MetaDataTests.java | 69 +++++++++++++++++++ .../routing/AliasResolveRoutingIT.java | 23 ++++--- docs/reference/docs/update.asciidoc | 1 + docs/reference/indices/aliases.asciidoc | 7 +- 20 files changed, 180 insertions(+), 57 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/DocumentRequest.java b/core/src/main/java/org/elasticsearch/action/DocumentRequest.java index fcfea39ab54..a90f013a6b9 100644 --- a/core/src/main/java/org/elasticsearch/action/DocumentRequest.java +++ b/core/src/main/java/org/elasticsearch/action/DocumentRequest.java @@ -62,4 +62,12 @@ public interface DocumentRequest extends IndicesRequest { * @return the Routing */ String routing(); + + + /** + * Get the parent for this request + * @return the Parent + */ + String parent(); + } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 9b18d0328e7..7252993427f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -239,7 +239,7 @@ public class TransportBulkAction extends HandledTransportAction implements private String id; @Nullable private String routing; + @Nullable + private String parent; private boolean refresh; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; @@ -94,6 +96,7 @@ public class DeleteRequest extends ReplicationRequest 
implements this.type = request.type(); this.id = request.id(); this.routing = request.routing(); + this.parent = request.parent(); this.refresh = request.refresh(); this.version = request.version(); this.versionType = request.versionType(); @@ -155,13 +158,18 @@ public class DeleteRequest extends ReplicationRequest implements } /** - * Sets the parent id of this document. Will simply set the routing to this value, as it is only - * used for routing with delete requests. + * @return The parent for this request. + */ + @Override + public String parent() { + return parent; + } + + /** + * Sets the parent id of this document. */ public DeleteRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -230,6 +238,7 @@ public class DeleteRequest extends ReplicationRequest implements type = in.readString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); refresh = in.readBoolean(); version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); @@ -241,6 +250,7 @@ public class DeleteRequest extends ReplicationRequest implements out.writeString(type); out.writeString(id); out.writeOptionalString(routing()); + out.writeOptionalString(parent()); out.writeBoolean(refresh); out.writeLong(version); out.writeByte(versionType.getValue()); diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index ca66b285753..f80b1a24396 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -95,7 +95,7 @@ public class TransportDeleteAction extends TransportReplicationAction implements Realti private String type; private String id; private String routing; + private String parent; private String preference; private String[] fields; @@ -77,6 +78,7 @@ public class GetRequest extends SingleShardRequest implements Realti this.type = getRequest.type; this.id = getRequest.id; this.routing = getRequest.routing; + this.parent = getRequest.parent; this.preference = getRequest.preference; this.fields = getRequest.fields; this.fetchSourceContext = getRequest.fetchSourceContext; @@ -153,13 +155,17 @@ public class GetRequest extends SingleShardRequest implements Realti } /** - * Sets the parent id of this document. Will simply set the routing to this value, as it is only - * used for routing with delete requests. + * @return The parent for this request. + */ + public String parent() { + return parent; + } + + /** + * Sets the parent id of this document. 
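+     * Since this change the parent is stored separately from the routing; the two are only
+     * combined, together with any alias routing, when the request is actually resolved.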
*/ public GetRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -291,6 +297,7 @@ public class GetRequest extends SingleShardRequest implements Realti type = in.readString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); preference = in.readOptionalString(); refresh = in.readBoolean(); int size = in.readInt(); @@ -320,6 +327,7 @@ public class GetRequest extends SingleShardRequest implements Realti out.writeString(type); out.writeString(id); out.writeOptionalString(routing); + out.writeOptionalString(parent); out.writeOptionalString(preference); out.writeBoolean(refresh); diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index a6af7050fd7..2843c2b2573 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -49,6 +49,7 @@ public class MultiGetRequest extends ActionRequest implements I private String type; private String id; private String routing; + private String parent; private String[] fields; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; @@ -116,12 +117,17 @@ public class MultiGetRequest extends ActionRequest implements I } public Item parent(String parent) { - if (routing == null) { - this.routing = parent; - } + this.parent = parent; return this; } + /** + * @return The parent for this request. + */ + public String parent() { + return parent; + } + public Item fields(String... fields) { this.fields = fields; return this; @@ -173,6 +179,7 @@ public class MultiGetRequest extends ActionRequest implements I type = in.readOptionalString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); int size = in.readVInt(); if (size > 0) { fields = new String[size]; @@ -192,6 +199,7 @@ public class MultiGetRequest extends ActionRequest implements I out.writeOptionalString(type); out.writeString(id); out.writeOptionalString(routing); + out.writeOptionalString(parent); if (fields == null) { out.writeVInt(0); } else { @@ -221,6 +229,7 @@ public class MultiGetRequest extends ActionRequest implements I if (!id.equals(item.id)) return false; if (!index.equals(item.index)) return false; if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false; + if (parent != null ? !parent.equals(item.parent) : item.parent != null) return false; if (type != null ? !type.equals(item.type) : item.type != null) return false; if (versionType != item.versionType) return false; @@ -233,6 +242,7 @@ public class MultiGetRequest extends ActionRequest implements I result = 31 * result + (type != null ? type.hashCode() : 0); result = 31 * result + id.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); + result = 31 * result + (parent != null ? parent.hashCode() : 0); result = 31 * result + (fields != null ? 
Arrays.hashCode(fields) : 0); result = 31 * result + Long.hashCode(version); result = 31 * result + versionType.hashCode(); diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index a2cb9873474..2d6bafc9623 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -82,7 +82,7 @@ public class TransportGetAction extends TransportSingleShardAction implements Do } /** - * Sets the parent id of this document. If routing is not set, automatically set it as the - * routing as well. + * Sets the parent id of this document. */ public IndexRequest parent(String parent) { this.parent = parent; - if (routing == null) { - routing = parent; - } return this; } @@ -593,7 +589,7 @@ public class IndexRequest extends ReplicationRequest implements Do public void process(MetaData metaData, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) { // resolve the routing if needed - routing(metaData.resolveIndexRouting(routing, index)); + routing(metaData.resolveIndexRouting(parent, routing, index)); // resolve timestamp if provided externally if (timestamp != null) { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index c13e44097bc..7a97a242401 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -65,6 +65,8 @@ public class TermVectorsRequest extends SingleShardRequest i private String routing; + private String parent; + private VersionType versionType = VersionType.INTERNAL; private long version = Versions.MATCH_ANY; @@ -162,6 +164,7 @@ public class TermVectorsRequest extends SingleShardRequest i this.flagsEnum = other.getFlags().clone(); this.preference = other.preference(); this.routing = other.routing(); + this.parent = other.parent(); if (other.selectedFields != null) { this.selectedFields = new HashSet<>(other.selectedFields); } @@ -181,6 +184,7 @@ public class TermVectorsRequest extends SingleShardRequest i this.type = item.type(); this.selectedFields(item.fields()); this.routing(item.routing()); + this.parent(item.parent()); } public EnumSet getFlags() { @@ -259,14 +263,16 @@ public class TermVectorsRequest extends SingleShardRequest i return this; } + @Override + public String parent() { + return parent; + } + /** - * Sets the parent id of this document. Will simply set the routing to this - * value, as it is only used for routing with delete requests. + * Sets the parent id of this document. 
*/ public TermVectorsRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -506,6 +512,7 @@ public class TermVectorsRequest extends SingleShardRequest i doc = in.readBytesReference(); } routing = in.readOptionalString(); + parent = in.readOptionalString(); preference = in.readOptionalString(); long flags = in.readVLong(); @@ -545,6 +552,7 @@ public class TermVectorsRequest extends SingleShardRequest i out.writeBytesReference(doc); } out.writeOptionalString(routing); + out.writeOptionalString(parent); out.writeOptionalString(preference); long longFlags = 0; for (Flag flag : flagsEnum) { @@ -629,6 +637,8 @@ public class TermVectorsRequest extends SingleShardRequest i termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser)); } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { termVectorsRequest.routing = parser.text(); + } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { + termVectorsRequest.parent = parser.text(); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { termVectorsRequest.version = parser.longValue(); } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index dd78d7a3f65..3943d2e6a67 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -66,7 +66,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction() { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index b790c21a45a..98d085b9b97 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -71,8 +71,8 @@ public class TransportTermVectorsAction extends TransportSingleShardAction listener) { - request.routing((state.metaData().resolveIndexRouting(request.routing(), request.index()))); + request.routing((state.metaData().resolveIndexRouting(request.parent(), request.routing(), request.index()))); // Fail fast on the node that received the request, rather than failing when translating on the index or delete request. if (request.routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.type())) { throw new RoutingMissingException(request.concreteIndex(), request.type(), request.id()); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 06df386828e..9e061d29500 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -184,13 +184,10 @@ public class UpdateRequest extends InstanceShardOperationRequest } /** - * The parent id is used for the upsert request and also implicitely sets the routing if not already set. 
+ * The parent id is used for the upsert request. */ public UpdateRequest parent(String parent) { this.parent = parent; - if (routing == null) { - routing = parent; - } return this; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 751f8a09ea5..418a2099004 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -441,13 +441,19 @@ public class MetaData implements Iterable, Diffable, Fr */ // TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions // in the index,bulk,update and delete apis. - public String resolveIndexRouting(@Nullable String routing, String aliasOrIndex) { + public String resolveIndexRouting(@Nullable String parent, @Nullable String routing, String aliasOrIndex) { if (aliasOrIndex == null) { + if (routing == null) { + return parent; + } return routing; } AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex); if (result == null || result.isAlias() == false) { + if (routing == null) { + return parent; + } return routing; } AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result; @@ -461,17 +467,19 @@ public class MetaData implements Iterable, Diffable, Fr } AliasMetaData aliasMd = alias.getFirstAliasMetaData(); if (aliasMd.indexRouting() != null) { + if (aliasMd.indexRouting().indexOf(',') != -1) { + throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation"); + } if (routing != null) { if (!routing.equals(aliasMd.indexRouting())) { throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation"); } } - routing = aliasMd.indexRouting(); + // Alias routing overrides the parent routing (if any). 
+ return aliasMd.indexRouting(); } - if (routing != null) { - if (routing.indexOf(',') != -1) { - throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + routing + "] that resolved to several routing values, rejecting operation"); - } + if (routing == null) { + return parent; } return routing; } diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java index cab27df6936..ec608e0bf54 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java @@ -255,7 +255,7 @@ public class TermVectorsUnitTests extends ESTestCase { assertThat(request.positions(), equalTo(req2.positions())); assertThat(request.termStatistics(), equalTo(req2.termStatistics())); assertThat(request.preference(), equalTo(pref)); - assertThat(request.routing(), equalTo(parent)); + assertThat(request.routing(), equalTo(null)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 59116859322..91a421ee420 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class MetaDataTests extends ESTestCase { @@ -41,4 +42,72 @@ public class MetaDataTests extends ESTestCase { } } + public void testResolveIndexRouting() { + IndexMetaData.Builder builder = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias0").build()) + .putAlias(AliasMetaData.builder("alias1").routing("1").build()) + .putAlias(AliasMetaData.builder("alias2").routing("1,2").build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // no alias, no index + assertEquals(metaData.resolveIndexRouting(null, null, null), null); + assertEquals(metaData.resolveIndexRouting(null, "0", null), "0"); + assertEquals(metaData.resolveIndexRouting("32", "0", null), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, null), "32"); + + // index, no alias + assertEquals(metaData.resolveIndexRouting("32", "0", "index"), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, "index"), "32"); + assertEquals(metaData.resolveIndexRouting(null, null, "index"), null); + assertEquals(metaData.resolveIndexRouting(null, "0", "index"), "0"); + + // alias with no index routing + assertEquals(metaData.resolveIndexRouting(null, null, "alias0"), null); + assertEquals(metaData.resolveIndexRouting(null, "0", "alias0"), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, "alias0"), "32"); + assertEquals(metaData.resolveIndexRouting("32", "0", "alias0"), "0"); + + // alias with index routing. 
+ assertEquals(metaData.resolveIndexRouting(null, null, "alias1"), "1"); + assertEquals(metaData.resolveIndexRouting("32", null, "alias1"), "1"); + assertEquals(metaData.resolveIndexRouting("32", "1", "alias1"), "1"); + try { + metaData.resolveIndexRouting(null, "0", "alias1"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + } + + try { + metaData.resolveIndexRouting("32", "0", "alias1"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + } + + // alias with invalid index routing. + try { + metaData.resolveIndexRouting(null, null, "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + + try { + metaData.resolveIndexRouting(null, "1", "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + + try { + metaData.resolveIndexRouting("32", null, "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java index 2740dd73246..db21fef6930 100644 --- a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java @@ -51,24 +51,27 @@ public class AliasResolveRoutingIT extends ESIntegTestCase { client().admin().indices().prepareAliases().addAliasAction(newAddAliasAction("test1", "alias0").routing("0")).execute().actionGet(); client().admin().indices().prepareAliases().addAliasAction(newAddAliasAction("test2", "alias0").routing("0")).execute().actionGet(); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "test1"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias"), nullValue()); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "test1"), nullValue()); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "test1"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias10"), equalTo("0")); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias20"), equalTo("0")); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias21"), equalTo("1")); - assertThat(clusterService().state().metaData().resolveIndexRouting("3", "test1"), equalTo("3")); - assertThat(clusterService().state().metaData().resolveIndexRouting("0", "alias10"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "test1"), nullValue()); + 
assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias10"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias20"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias21"), equalTo("1")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, "3", "test1"), equalTo("3")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, "0", "alias10"), equalTo("0")); + + // Force the alias routing and ignore the parent. + assertThat(clusterService().state().metaData().resolveIndexRouting("1", null, "alias10"), equalTo("0")); try { - clusterService().state().metaData().resolveIndexRouting("1", "alias10"); + clusterService().state().metaData().resolveIndexRouting(null, "1", "alias10"); fail("should fail"); } catch (IllegalArgumentException e) { // all is well, we can't have two mappings, one provided, and one in the alias } try { - clusterService().state().metaData().resolveIndexRouting(null, "alias0"); + clusterService().state().metaData().resolveIndexRouting(null, null, "alias0"); fail("should fail"); } catch (IllegalArgumentException ex) { // Expected diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 7d2dbfaeede..25a4f34fa46 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -223,6 +223,7 @@ Can't be used to update the routing of an existing document. Parent is used to route the update request to the right shard and sets the parent for the upsert request if the document being updated doesn't exist. Can't be used to update the `parent` of an existing document. +If an alias index routing is specified, it overrides the parent routing and is used to route the request. `timeout`:: diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 57faa9718f9..78e871de232 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -193,8 +193,8 @@ curl -XPOST 'http://localhost:9200/_aliases' -d ' As shown in the example above, search routing may contain several values separated by comma. Index routing can contain only a single value. -If an operation that uses routing alias also has a routing parameter, an -intersection of both alias routing and routing specified in the +If a search operation that uses a routing alias also has a routing parameter, the +intersection of both the search alias routing and the routing specified in the parameter is used. For example the following command will use "2" as a routing value: -------------------------------------------------- curl -XGET 'http://localhost:9200/alias2/_search?q=user:kimchy&routing=2,3' -------------------------------------------------- +If an index operation that uses an alias with index routing also has a parent routing, the +parent routing is ignored. + [float] [[alias-adding]] === Add a single alias From ac393b7a313c2b009a50f4c8e72b2a35c6b57fc3 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 18 Dec 2015 18:19:48 +0100 Subject: [PATCH 186/322] Make mappings tests more realistic. DocumentMapperParser has both parse and parseCompressed methods, but the parse methods are only used from the unit tests. This commit removes the parse methods and moves all tests to parseCompressed so that they test more realistically how mappings are managed. Then I renamed parseCompressed to parse, given that it is the only alternative left anyway.
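
Before the diffs for this commit, a brief aside on the routing/parent patch above: its behavior reduces to a small precedence rule. Alias index routing, when present, must be single-valued and consistent with any explicit routing, and it wins over both the explicit routing and the parent; otherwise an explicit routing wins over the parent. The following self-contained Java sketch illustrates that rule; the class and method names are hypothetical and are not part of the patch itself.

    public final class RoutingPrecedenceSketch {

        // Minimal sketch, assuming the parent, the explicit routing and the alias
        // index routing have already been extracted from the request and the alias
        // meta data. Hypothetical names, for illustration only.
        static String resolve(String parent, String routing, String aliasIndexRouting) {
            if (aliasIndexRouting != null) {
                if (aliasIndexRouting.indexOf(',') != -1) {
                    // index routing must resolve to a single routing value
                    throw new IllegalArgumentException("routing value [" + aliasIndexRouting
                            + "] resolved to several routing values, rejecting operation");
                }
                if (routing != null && routing.equals(aliasIndexRouting) == false) {
                    // an explicit routing that contradicts the alias routing is rejected
                    throw new IllegalArgumentException("routing value [" + routing
                            + "] conflicts with alias index routing [" + aliasIndexRouting + "]");
                }
                // the alias routing also overrides the parent routing (if any)
                return aliasIndexRouting;
            }
            return routing != null ? routing : parent;
        }

        public static void main(String[] args) {
            System.out.println(resolve("1", null, "0"));   // "0": alias routing beats the parent
            System.out.println(resolve("32", null, null)); // "32": parent used as a last resort
            System.out.println(resolve("32", "0", null));  // "0": explicit routing beats the parent
        }
    }

The three expected outputs mirror the assertions added to MetaDataTests and AliasResolveRoutingIT above.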
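As for this commit, the call-site migration visible throughout the test diffs below is mechanical. Here is a minimal sketch of the new-style invocation, assuming a DocumentMapperParser obtained from a test index's MapperService as in those diffs; the wrapper class and helper method name are hypothetical.

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;

    public class ParseMappingSketch {
        // Before: parser.parse(mappingAsString) took a raw String, a code path only the tests used.
        // After: the mapping source is wrapped in CompressedXContent, the same form in which
        // mappings are stored in the cluster meta data.
        static DocumentMapper parseMapping(DocumentMapperParser parser, String mapping) throws Exception {
            return parser.parse("type", new CompressedXContent(mapping));
        }
    }

After the rename there is a single parse entry point left, so tests and production code exercise the same code path.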
--- .../index/mapper/DocumentMapperParser.java | 25 +------ .../index/mapper/MapperService.java | 4 +- .../org/elasticsearch/codecs/CodecTests.java | 5 +- .../index/analysis/PreBuiltAnalyzerTests.java | 3 +- .../fielddata/BinaryDVFieldDataTests.java | 3 +- .../index/fielddata/DuelFieldDataTests.java | 9 +-- .../index/mapper/DocumentParserTests.java | 5 +- .../index/mapper/DynamicMappingTests.java | 27 +++---- .../mapper/all/SimpleAllMapperTests.java | 57 +++++++-------- .../mapper/binary/BinaryMappingTests.java | 5 +- .../mapper/boost/CustomBoostMappingTests.java | 3 +- .../mapper/boost/FieldLevelBoostTests.java | 5 +- .../camelcase/CamelCaseFieldNameTests.java | 2 +- .../CompletionFieldMapperTests.java | 29 ++++---- .../mapper/compound/CompoundTypesTests.java | 3 +- .../mapper/copyto/CopyToMapperTests.java | 19 ++--- .../mapper/core/BooleanFieldMapperTests.java | 7 +- .../core/TokenCountFieldMapperTests.java | 5 +- .../mapper/date/SimpleDateMappingTests.java | 3 +- .../SimpleExternalMappingTests.java | 13 ++-- .../mapper/geo/GeoPointFieldMapperTests.java | 70 +++++++++--------- .../mapper/geo/GeoShapeFieldMapperTests.java | 30 ++++---- .../geo/GeohashMappingGeoPointTests.java | 13 ++-- .../index/mapper/id/IdMappingTests.java | 11 +-- .../mapper/index/IndexTypeMapperTests.java | 17 ++--- .../internal/FieldNamesFieldMapperTests.java | 23 +++--- .../mapper/internal/TypeFieldMapperTests.java | 5 +- .../index/mapper/ip/SimpleIpMappingTests.java | 7 +- .../lucene/StoredNumericValuesTests.java | 3 +- .../mapper/merge/TestMergeMapperTests.java | 20 +++--- .../mapper/multifield/MultiFieldTests.java | 23 +++--- .../merge/JavaMultiFieldMergeTests.java | 8 +-- .../mapper/nested/NestedMappingTests.java | 15 ++-- .../mapper/null_value/NullValueTests.java | 4 +- .../mapper/numeric/SimpleNumericTests.java | 25 +++---- .../object/NullValueObjectMappingTests.java | 3 +- .../object/SimpleObjectMappingTests.java | 15 ++-- .../mapper/parent/ParentMappingTests.java | 7 +- .../index/mapper/path/PathMapperTests.java | 3 +- .../routing/RoutingTypeMapperTests.java | 13 ++-- .../mapper/simple/SimpleMapperTests.java | 15 ++-- .../source/DefaultSourceMappingTests.java | 32 ++++----- .../string/SimpleStringMappingTests.java | 32 ++++----- .../timestamp/TimestampMappingTests.java | 72 +++++++++---------- .../index/mapper/ttl/TTLMappingTests.java | 24 +++---- .../ParseDocumentTypeLevelsTests.java | 21 +++--- .../ParseMappingTypeLevelTests.java | 7 +- .../index/similarity/SimilarityTests.java | 13 ++-- .../CategoryContextMappingTests.java | 9 +-- .../completion/GeoContextMappingTests.java | 9 +-- .../DateAttachmentMapperTests.java | 3 +- .../attachments/EncryptedDocMapperTests.java | 7 +- ...anguageDetectionAttachmentMapperTests.java | 3 +- .../attachments/MetadataMapperTests.java | 3 +- .../MultifieldAttachmentMapperTests.java | 5 +- .../SimpleAttachmentMapperTests.java | 10 +-- .../mapper/attachments/StandaloneRunner.java | 3 +- .../mapper/attachments/VariousDocTests.java | 3 +- .../murmur3/Murmur3FieldMapperTests.java | 15 ++-- .../index/mapper/size/SizeMappingTests.java | 13 ++-- 60 files changed, 435 insertions(+), 411 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 29e3fa4e456..d7cc5eb8c93 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -76,32 +76,11 @@ 
public class DocumentMapperParser { return new Mapper.TypeParser.ParserContext(type, analysisService, similarityService::getSimilarity, mapperService, typeParsers::get, indexVersionCreated, parseFieldMatcher); } - public DocumentMapper parse(String source) throws MapperParsingException { - return parse(null, source); - } - - public DocumentMapper parse(@Nullable String type, String source) throws MapperParsingException { + public DocumentMapper parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { return parse(type, source, null); } - public DocumentMapper parse(@Nullable String type, String source, String defaultSource) throws MapperParsingException { - Map mapping = null; - if (source != null) { - Tuple> t = extractMapping(type, source); - type = t.v1(); - mapping = t.v2(); - } - if (mapping == null) { - mapping = new HashMap<>(); - } - return parse(type, mapping, defaultSource); - } - - public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException { - return parseCompressed(type, source, null); - } - - public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { + public DocumentMapper parse(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { Map mapping = null; if (source != null) { Map root = XContentHelper.convertToMap(source.compressedReference(), true).v2(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index bcb7010f753..c3622b4a5b8 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -213,7 +213,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it // NOTE: never apply the default here - DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); + DocumentMapper mapper = documentParser.parse(type, mappingSource); // still add it as a document mapper so we have it registered and, for example, persisted back into // the cluster meta data if needed, or checked for existence try (ReleasableLock lock = mappingWriteLock.acquire()) { @@ -392,7 +392,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } else { defaultMappingSource = this.defaultMappingSource; } - return documentParser.parseCompressed(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); + return documentParser.parse(mappingType, mappingSource, applyDefault ? 
defaultMappingSource : null); } public boolean hasMapping(String mappingType) { diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index 697365151f7..cffdcdc86eb 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.codecs; import org.apache.lucene.codecs.Codec; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -46,7 +47,7 @@ public class CodecTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); if (v.onOrAfter(Version.V_2_0_0_beta1)) { fail("Elasticsearch 2.0 should not support custom postings formats"); } @@ -69,7 +70,7 @@ public class CodecTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); if (v.onOrAfter(Version.V_2_0_0_beta1)) { fail("Elasticsearch 2.0 should not support custom postings formats"); } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index fecb7e9b880..297cab86f5e 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; @@ -154,7 +155,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").field("analyzer", analyzerName).endObject().endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 616e7a44c12..ca207fbdc2c 
100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.fielddata; import com.carrotsearch.hppc.ObjectArrayList; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,7 +51,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { .endObject() .endObject().endObject().string(); - final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping); + final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping)); ObjectArrayList bytesList1 = new ObjectArrayList<>(2); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java index 1c4514c9fc9..26ea97dbf15 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.English; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -74,7 +75,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { .startObject("float").field("type", "float").endObject() .startObject("double").field("type", "double").endObject() .endObject().endObject().endObject().string(); - final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping); + final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); Random random = getRandom(); int atLeast = scaledRandomIntBetween(200, 1500); for (int i = 0; i < atLeast; i++) { @@ -142,7 +143,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { .startObject("long").field("type", "long").endObject() .endObject().endObject().endObject().string(); - final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping); + final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); Random random = getRandom(); int atLeast = scaledRandomIntBetween(200, 1500); final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 10); @@ -219,7 +220,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { .startObject("double").field("type", "double").endObject() .endObject().endObject().endObject().string(); - final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping); + final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); Random random = getRandom(); int atLeast = scaledRandomIntBetween(200, 1500); final int maxNumValues = randomBoolean() ? 
1 : randomIntBetween(2, 10); @@ -397,7 +398,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { .startObject("geopoint").field("type", "geo_point").startObject("fielddata").field("format", "doc_values").endObject().endObject() .endObject().endObject().endObject().string(); - final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping); + final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); Random random = getRandom(); int atLeast = scaledRandomIntBetween(200, 1500); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index fccf642e9df..3206a5e87ae 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -31,7 +32,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .field("enabled", false).endObject().endObject().string(); - DocumentMapper mapper = mapperParser.parse(mapping); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); BytesReference bytes = XContentFactory.jsonBuilder() .startObject().startObject("foo") @@ -48,7 +49,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .startObject("foo").field("enabled", false).endObject() .startObject("bar").field("type", "integer").endObject() .endObject().endObject().endObject().string(); - DocumentMapper mapper = mapperParser.parse(mapping); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); BytesReference bytes = XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index ff66ffc6181..d38e458248a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,7 +53,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", 
"1", jsonBuilder() .startObject() @@ -72,7 +73,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder() .startObject() @@ -93,7 +94,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { defaultMapper.parse("test", "type", "1", jsonBuilder() @@ -128,7 +129,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder() .startObject().startObject("obj1") @@ -151,7 +152,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { defaultMapper.parse("test", "type", "1", jsonBuilder() @@ -214,7 +215,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("foo").field("type", "string").endObject().endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()); // foo is already defined in the mappings assertNull(update); @@ -227,7 +228,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("type").endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()); @@ -247,7 +248,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("foo").field("type", "string").endObject().endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject()); @@ -268,7 +269,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("type").endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject()); @@ -289,7 +290,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("type").endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject()); @@ -309,7 +310,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("type").endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject()); @@ -329,7 +330,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("foo").field("type", "object").endObject() .endObject().endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject()); @@ -349,7 +350,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .startObject("type").endObject() .endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 0b6354a45b6..341ba25fd9f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.all.AllEntries; import org.elasticsearch.common.lucene.all.AllField; @@ -69,7 +70,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testSimpleAllMappers() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = 
docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -88,7 +89,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testAllMappersNoBoost() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json"); IndexService index = createIndex("test"); - DocumentMapper docMapper = index.mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = index.mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -102,7 +103,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testAllMappersTermQuery() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -120,7 +121,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { // #6187: make sure we see AllTermQuery even when offsets are indexed in the _all field: public void testAllMappersWithOffsetsTermQuery() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -139,7 +140,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { // #6187: if _all doesn't index positions then we never use AllTokenStream, even if some fields have boost public void testBoostWithOmitPositions() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -150,7 +151,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { // #6187: if no fields were boosted, we shouldn't use AllTokenStream public void testNoBoost() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json"); - 
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -161,10 +162,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testSimpleAllMappersWithReparse() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json"); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it - DocumentMapper builtDocMapper = parser.parse(builtMapping); + DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); @@ -179,7 +180,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testSimpleAllMappersWithStore() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -196,10 +197,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testSimpleAllMappersWithReparseWithStore() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json"); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it - DocumentMapper builtDocMapper = parser.parse(builtMapping); + DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc(); @@ -265,10 +266,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8(); logger.info(mapping); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it - DocumentMapper builtDocMapper = parser.parse(builtMapping); + DocumentMapper builtDocMapper = 
parser.parse("test", new CompressedXContent(builtMapping)); byte[] json = jsonBuilder().startObject() .field("foo", "bar") @@ -312,7 +313,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testMultiField_includeInAllSetToFalse() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_include_in_all_set_to_false.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject() @@ -330,7 +331,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testMultiField_defaults() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_default.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject() @@ -350,7 +351,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testMisplacedTypeInRoot() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_type_in_root.json"); try { - createIndex("test").mapperService().documentMapperParser().parse("test", mapping); + createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters")); @@ -362,7 +363,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testMistypedTypeInRoot() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json"); try { - createIndex("test").mapperService().documentMapperParser().parse("test", mapping); + createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters")); @@ -374,7 +375,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testMisplacedMappingAsRoot() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_mapping_key_in_root.json"); try { - createIndex("test").mapperService().documentMapperParser().parse("test", mapping); + createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters")); @@ -387,17 +388,17 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = 
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_template_mapping.json"); - parser.parse("test", mapping); + parser.parse("test", new CompressedXContent(mapping)); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_date_formats_mapping.json"); - parser.parse("test", mapping); + parser.parse("test", new CompressedXContent(mapping)); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_date_detection_mapping.json"); - parser.parse("test", mapping); + parser.parse("test", new CompressedXContent(mapping)); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_numeric_detection_mapping.json"); - parser.parse("test", mapping); + parser.parse("test", new CompressedXContent(mapping)); } // issue https://github.com/elasticsearch/elasticsearch/issues/5864 - public void testMetadataMappersStillWorking() { + public void testMetadataMappersStillWorking() throws MapperParsingException, IOException { String mapping = "{"; Map rootTypes = new HashMap<>(); //just pick some example from DocumentMapperParser.rootTypeParsers @@ -410,7 +411,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { mapping += "\"" + key+ "\"" + ":" + rootTypes.get(key) + ",\n"; } mapping += "\"properties\":{}}" ; - createIndex("test").mapperService().documentMapperParser().parse("test", mapping); + createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping)); } public void testDocValuesNotAllowed() throws IOException { @@ -419,7 +420,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { .field("doc_values", true) .endObject().endObject().endObject().string(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); fail(); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); @@ -433,7 +434,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); try { - createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse(mapping); + createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); fail(); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); @@ -458,7 +459,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { public void testIncludeInObjectBackcompat() throws Exception { String mapping = jsonBuilder().startObject().startObject("type").endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().field("_all", "foo").endObject().bytes()); @@ -470,7 +471,7 @@ public class SimpleAllMapperTests extends 
ESSingleNodeTestCase { public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index 06f42b3f49c..308478ad49d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.binary; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -50,7 +51,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field"); assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class)); @@ -67,7 +68,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); // case 1: a simple binary value final byte[] binaryValue1 = new byte[100]; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index 05a0a03cc59..5480992acb2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.boost; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -39,7 +40,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() .startObject("s_field").field("value", 
"s_value").field("boost", 2.0f).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java index c9320e2da18..599084de2e6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.boost; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -44,7 +45,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() .string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = XContentFactory.jsonBuilder().startObject() .startObject("str_field").field("boost", 2.0).field("value", "some name").endObject() .startObject("int_field").field("boost", 3.0).field("value", 10).endObject() @@ -94,7 +95,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() .string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); try { docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() .startObject("str_field").field("foo", "bar") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java index 1cfee0dd66e..4fe0f9b77b4 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java @@ -47,7 +47,7 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase { assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase")); assertNull(documentMapper.mappers().getMapper("this_is_camel_case")); - documentMapper = index.mapperService().documentMapperParser().parse(documentMapper.mappingSource().string()); + documentMapper = index.mapperService().documentMapperParser().parse("type", documentMapper.mappingSource()); assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase")); assertNull(documentMapper.mappers().getMapper("this_is_camel_case")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java index 83160736865..5422d6c2f7d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -60,7 +61,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); @@ -93,7 +94,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); @@ -128,7 +129,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); @@ -153,7 +154,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() @@ -172,7 +173,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() @@ -191,7 +192,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() 
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -213,7 +214,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -235,7 +236,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -267,7 +268,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -299,7 +300,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         try {
             defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                     .startObject()
@@ -325,7 +326,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
         charsRefBuilder.append("sugg");
         charsRefBuilder.setCharAt(2, '\u001F');
@@ -378,7 +379,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co"));
@@ -392,7 +393,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
@@ -409,7 +410,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
index 4dc017aa6bd..5ecf7745e98 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.compound;
 
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
@@ -35,7 +36,7 @@ public class CompoundTypesTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
index 2fe0cf9f218..149d0be305f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.copyto;
 
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -149,7 +150,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -175,7 +176,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -211,7 +212,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -240,7 +241,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -274,7 +275,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -308,7 +309,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().endObject().string();
 
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper docMapperBefore = parser.parse(mappingBefore);
+        DocumentMapper docMapperBefore = parser.parse("type1", new CompressedXContent(mappingBefore));
 
         List<String> fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
@@ -317,7 +318,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
 
         assertThat(fields.get(1), equalTo("bar"));
 
-        DocumentMapper docMapperAfter = parser.parse(mappingAfter);
+        DocumentMapper docMapperAfter = parser.parse("type1", new CompressedXContent(mappingAfter));
 
         docMapperBefore.merge(docMapperAfter.mapping(), true, false);
 
@@ -372,7 +373,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         }
         mapping = mapping.endObject();
 
-        DocumentMapper mapper = parser.parse(mapping.string());
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
 
         XContentBuilder jsonDoc = XContentFactory.jsonBuilder()
                 .startObject()
@@ -452,7 +453,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
index 53b930b87f8..2bf97a4885c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
@@ -28,6 +28,7 @@ import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -57,7 +58,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -86,7 +87,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
         FieldMapper mapper = defaultMapper.mappers().getMapper("field");
         XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
         mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
@@ -102,7 +103,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        defaultMapper = parser.parse(mapping);
+        defaultMapper = parser.parse("type", new CompressedXContent(mapping));
         mapper = defaultMapper.mappers().getMapper("field");
         builder = XContentFactory.jsonBuilder().startObject();
         mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
index 4c0bffcc1bd..25a9adf7125 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
@@ -50,7 +51,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper stage1 = parser.parse(stage1Mapping);
+        DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
 
         String stage2Mapping = XContentFactory.jsonBuilder().startObject()
                 .startObject("person")
@@ -61,7 +62,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .endObject().endObject().string();
-        DocumentMapper stage2 = parser.parse(stage2Mapping);
+        DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
 
         stage1.merge(stage2.mapping(), true, false);
         // Just simulated so merge hasn't happened yet
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
index 8387f724618..a4009c8a861 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
@@ -30,6 +30,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.LocaleUtils;
@@ -336,7 +337,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
 
         // Unless the global ignore_malformed option is set to true
         Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
-        defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping);
+        defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field3", "a")
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
index 24449015a21..bf3196fdcf7 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.externalvalues;
 import org.apache.lucene.util.GeoUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -55,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
                 indexService.analysisService(), indexService.similarityService(), mapperRegistry);
-        DocumentMapper documentMapper = parser.parse(
+        DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
                 XContentFactory.jsonBuilder().startObject().startObject("type")
                         .startObject(ExternalMetadataMapper.CONTENT_TYPE)
                         .endObject()
@@ -63,7 +64,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
                         .startObject("field").field("type", "external").endObject()
                 .endObject()
                 .endObject().endObject().string()
-        );
+        ));
 
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -102,7 +103,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
                 indexService.analysisService(), indexService.similarityService(), mapperRegistry);
-        DocumentMapper documentMapper = parser.parse(
+        DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
                 XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                         .startObject("field")
                         .field("type", ExternalMapperPlugin.EXTERNAL)
@@ -121,7 +122,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
                         .endObject()
                         .endObject()
                 .endObject().endObject().endObject()
-        .string());
+        .string()));
 
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -161,7 +162,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
                 indexService.analysisService(), indexService.similarityService(), mapperRegistry);
-        DocumentMapper documentMapper = parser.parse(
+        DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
                 XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                         .startObject("field")
                         .field("type", ExternalMapperPlugin.EXTERNAL)
@@ -183,7 +184,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
                         .endObject()
                         .endObject()
                 .endObject().endObject().endObject()
-        .string());
+        .string()));
 
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
index 17a16913f3e..af39f45fa8d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
@@ -57,7 +57,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -87,7 +87,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -107,7 +107,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -127,7 +127,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -147,7 +147,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -171,7 +171,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
 
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -220,7 +220,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string();
 
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
 
         defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -283,7 +283,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string();
 
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
 
         defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -323,7 +323,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -350,7 +350,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -386,7 +386,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -410,7 +410,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -436,7 +436,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -472,7 +472,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -497,7 +497,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -521,7 +521,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -547,7 +547,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -590,7 +590,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                     .field("validate", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(validateMapping);
+            parser.parse("type", new CompressedXContent(validateMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
@@ -601,7 +601,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                    .field("validate_lat", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(validateMapping);
+            parser.parse("type", new CompressedXContent(validateMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
@@ -612,7 +612,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                     .field("validate_lon", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(validateMapping);
+            parser.parse("type", new CompressedXContent(validateMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
@@ -624,7 +624,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                     .field("normalize", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(normalizeMapping);
+            parser.parse("type", new CompressedXContent(normalizeMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
@@ -635,7 +635,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                     .field("normalize_lat", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(normalizeMapping);
+            parser.parse("type", new CompressedXContent(normalizeMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
@@ -646,7 +646,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                     .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                     .field("normalize_lon", true).endObject().endObject()
                     .endObject().endObject().string();
-            parser.parse(normalizeMapping);
+            parser.parse("type", new CompressedXContent(normalizeMapping));
             fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
         } catch (MapperParsingException e) {
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
@@ -667,44 +667,44 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("validate", false).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("validate_lat", false).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("validate_lon", false).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
 
         // normalize
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("normalize", true).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("normalize_lat", true).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                 .field("normalize_lon", true).endObject().endObject()
                 .endObject().endObject().string();
-        parser.parse(mapping);
-        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+        parser.parse("type", new CompressedXContent(mapping));
+        assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
     }
 
     public void testGeoPointMapperMerge() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java
index 596efdcc273..380a1e04ad3 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java
@@ -46,7 +46,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -70,7 +70,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -87,7 +87,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse(mapping);
+        defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -108,7 +108,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -123,7 +123,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse(mapping);
+        defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -141,7 +141,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -164,7 +164,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -192,7 +192,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -216,7 +216,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -242,7 +242,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -266,7 +266,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -289,7 +289,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -311,7 +311,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -334,7 +334,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -356,7 +356,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
index 857dcd5d07c..9e0d7b596eb 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.util.GeoHashUtils;
 import org.apache.lucene.util.GeoUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -48,7 +49,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -72,7 +73,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -96,7 +97,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -117,7 +118,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
         assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
         BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
@@ -131,7 +132,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
         assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
         BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
@@ -145,7 +146,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
 
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java
index 679b49e7be5..f8eaf89faab 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.id;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -43,7 +44,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
     public void testId() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -69,7 +70,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
                 .startObject("_id").field("index", "not_analyzed").endObject()
                 .endObject().endObject().string();
         Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -85,7 +86,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
                 .startObject("_id").field("path", "my_path").endObject()
                 .endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         // serialize the id mapping
         XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
@@ -103,7 +104,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
     public void testIncludeInObjectBackcompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
                 .startObject()
@@ -117,7 +118,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
 
     public void testIncludeInObjectNotAllowed() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
index 77fc409f30d..70a714b29ec 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.index;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import
org.elasticsearch.index.mapper.DocumentMapper; @@ -40,7 +41,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IndexFieldMapper indexMapper = docMapper.indexMapper(); assertThat(indexMapper.enabled(), equalTo(true)); @@ -58,7 +59,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class); assertThat(indexMapper.enabled(), equalTo(false)); @@ -75,7 +76,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { public void testDefaultDisabledIndexMapper() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class); assertThat(indexMapper.enabled(), equalTo(false)); @@ -94,13 +95,13 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { .startObject("_index").field("enabled", true).endObject() .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser(); - DocumentMapper mapperEnabled = parser.parse(mappingWithIndexEnabled); + DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(mappingWithIndexEnabled)); String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled); + DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(mappingWithIndexDisabled)); mapperEnabled.merge(mapperDisabled.mapping(), false, false); assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false)); @@ -111,12 +112,12 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { .startObject("_index").field("enabled", true).endObject() .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser(); - DocumentMapper enabledMapper = parser.parse(enabledMapping); + DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", false).endObject() .endObject().endObject().string(); - 
DocumentMapper disabledMapper = parser.parse(disabledMapping); + DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); enabledMapper.merge(disabledMapper.mapping(), false, false); assertThat(enabledMapper.indexMapper().enabled(), is(false)); @@ -129,7 +130,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { .field("store", "yes").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class); assertThat(indexMapper.enabled(), equalTo(true)); assertThat(indexMapper.fieldType().stored(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index f97b22e0ecb..d6c2cbf2e9f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -81,7 +82,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .startObject("_field_names").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().hasDocValues()); assertEquals(IndexOptions.DOCS, fieldNamesMapper.fieldType().indexOptions()); @@ -92,7 +93,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { public void testInjectIntoDocDuringParsing() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -110,7 +111,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper 
fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertTrue(fieldNamesMapper.fieldType().isEnabled()); @@ -127,7 +128,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); @@ -143,7 +144,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { public void testPre13Disabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_2_4.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); } @@ -155,7 +156,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); @@ -174,7 +175,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertTrue(fieldNamesMapper.fieldType().stored()); } @@ -188,12 +189,12 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper mapperEnabled = parser.parse(enabledMapping); - DocumentMapper mapperDisabled = parser.parse(disabledMapping); + DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping)); + DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(disabledMapping)); mapperEnabled.merge(mapperDisabled.mapping(), false, false); 
assertFalse(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - mapperEnabled = parser.parse(enabledMapping); + mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping)); mapperDisabled.merge(mapperEnabled.mapping(), false, false); assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); } @@ -282,7 +283,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, indexService.analysisService(), indexService.similarityService(), mapperRegistry); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}")); IndexableField[] fields = parsedDocument.rootDoc().getFields(FieldNamesFieldMapper.NAME); boolean found = false; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java index 105b3b446ce..309fa274919 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.internal; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; @@ -31,7 +32,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase { public void testDocValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class); assertTrue(typeMapper.fieldType().hasDocValues()); } @@ -41,7 +42,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id).build(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class); assertFalse(typeMapper.fieldType().hasDocValues()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java index 4245641fd82..82a8918c66b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.ip; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; @@ -42,7 +43,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -82,7 +83,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase { .field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject() .endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes()); @@ -104,7 +105,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase { // Unless the global ignore_malformed option is set to true Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping); + defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); assertThat(doc.rootDoc().getField("field3"), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java index d67b97c3d85..89e6630ffa2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Numbers; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; @@ -60,7 +61,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { .endObject() .endObject() .string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() 
.startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index b2faf44e657..d86a93e3f1b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -51,13 +51,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("name").field("type", "string").endObject() .endObject().endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper stage1 = parser.parse(stage1Mapping); + DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping)); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("name").field("type", "string").endObject() .startObject("age").field("type", "integer").endObject() .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper stage2 = parser.parse(stage2Mapping); + DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping)); stage1.merge(stage2.mapping(), true, false); // since we are simulating, we should not have the age mapping @@ -73,11 +73,11 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { public void testMergeObjectDynamic() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string(); - DocumentMapper mapper = parser.parse(objectMapping); + DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping)); assertNull(mapper.root().dynamic()); String withDynamicMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject().string(); - DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping); + DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping)); assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); mapper.merge(withDynamicMapper.mapping(), false, false); @@ -89,11 +89,11 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "object").endObject() .endObject().endObject().endObject().string(); - DocumentMapper objectMapper = parser.parse(objectMapping); + DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping)); String nestedMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "nested").endObject() .endObject().endObject().endObject().string(); - DocumentMapper nestedMapper = parser.parse(nestedMapping); + DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping)); try { objectMapper.merge(nestedMapper.mapping(), true, false); @@ -119,8 +119,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", 
"string").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject() .endObject().endObject().string(); - DocumentMapper existing = parser.parse(mapping1); - DocumentMapper changed = parser.parse(mapping2); + DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); + DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); existing.merge(changed.mapping(), false, false); @@ -137,8 +137,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject() .endObject().endObject().string(); - DocumentMapper existing = parser.parse(mapping1); - DocumentMapper changed = parser.parse(mapping2); + DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); + DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); existing.merge(changed.mapping(), false, false); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 506e51f8070..8a66a78f3ac 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -77,7 +78,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { } private void testMultiField(String mapping) throws Exception { - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); @@ -161,7 +162,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { String builtMapping = builderDocMapper.mappingSource().string(); // System.out.println(builtMapping); // reparse it - DocumentMapper docMapper = mapperParser.parse(builtMapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); @@ -189,7 +190,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { public void testConvertMultiFieldNoDefaultField() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json"); - DocumentMapper docMapper = 
createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); @@ -261,7 +262,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); boolean indexCreatedBefore22 = version.before(Version.V_2_2_0); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json"); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.mappers().getMapper("a"), notNullValue()); assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class)); @@ -377,7 +378,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { public void testConvertMultiFieldCompletion() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-completion.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.mappers().getMapper("a"), notNullValue()); assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class)); @@ -457,7 +458,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { } builder = builder.endObject().endObject().endObject().endObject().endObject(); String mapping = builder.string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); Arrays.sort(multiFieldNames); Map sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2(); @@ -498,8 +499,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase { // Check the mapping remains identical when deserialed/re-serialsed final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(builder.string()); - DocumentMapper docMapper2 = parser.parse(docMapper.mappingSource().string()); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(builder.string())); + DocumentMapper docMapper2 = parser.parse("type", docMapper.mappingSource()); assertThat(docMapper.mappingSource(), equalTo(docMapper2.mappingSource())); } @@ -509,7 +510,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("expected mapping parse failure"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("cannot be used in multi field")); @@ -522,7 +523,7 
@@ public class MultiFieldTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("expected mapping parse failure"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("cannot be used in multi field")); @@ -548,7 +549,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test").mapperService(); try { - mapperService.documentMapperParser().parse(mapping.string()); + mapperService.documentMapperParser().parse("my_type", new CompressedXContent(mapping.string())); fail("this should throw an exception because one field contains a dot"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot contain '.'")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 83e10bd826c..1a999a64018 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -43,7 +43,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -56,7 +56,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - DocumentMapper docMapper2 = parser.parse(mapping); + DocumentMapper docMapper2 = parser.parse("person", new CompressedXContent(mapping)); docMapper.merge(docMapper2.mapping(), true, false); @@ -77,7 +77,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - DocumentMapper docMapper3 = parser.parse(mapping); + DocumentMapper docMapper3 = parser.parse("person", new CompressedXContent(mapping)); docMapper.merge(docMapper3.mapping(), true, false); @@ -92,7 +92,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - DocumentMapper docMapper4 = parser.parse(mapping); + DocumentMapper docMapper4 = parser.parse("person", new CompressedXContent(mapping)); docMapper.merge(docMapper4.mapping(), true, false); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java index be27e9f83fb..6debfa05ee9 
100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.nested; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -36,7 +37,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .startObject("nested1").field("type", "nested").endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -62,7 +63,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .startObject("nested1").field("type", "nested").endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -111,7 +112,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -162,7 +163,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -213,7 +214,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -264,7 +265,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); 
assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -315,7 +316,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java index fedb2d83d5d..be3617aaa5a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java @@ -1,5 +1,7 @@ package org.elasticsearch.index.mapper.null_value; +import org.elasticsearch.common.compress.CompressedXContent; + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -49,7 +51,7 @@ public class NullValueTests extends ESSingleNodeTestCase { .endObject().string(); try { - indexService.mapperService().documentMapperParser().parse(mapping); + indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); fail("Test should have failed because [null_value] was null."); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Property [null_value] cannot be null.")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index d93ae9b6787..14b6e7c7110 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -113,7 +114,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -147,7 +148,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { // Unless the global ignore_malformed option is set to true Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping); + defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = 
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field3", "a") @@ -184,7 +185,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); //Test numbers passed as strings String invalidJsonNumberAsString="1"; @@ -284,7 +285,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -320,7 +321,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -352,7 +353,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .field("date_detection", true) .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -402,7 +403,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -472,7 +473,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -531,7 +532,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().endObject().string(); try { - parser.parse(mappingWithTV); + parser.parse("type", new CompressedXContent(mappingWithTV)); fail(); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]")); @@ -541,7 +542,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) .build(); parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse(mappingWithTV); // no 
exception + parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } public void testAnalyzerBackCompat() throws Exception { @@ -560,7 +561,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().endObject().string(); try { - parser.parse(mappingWithTV); + parser.parse("type", new CompressedXContent(mappingWithTV)); fail(); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]")); @@ -570,6 +571,6 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) .build(); parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse(mappingWithTV); // no exception + parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java index b13fcc8ed91..0a03601ea62 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.object; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -37,7 +38,7 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("obj1").field("type", "object").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java index 917ee9806ed..885e038de60 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.object; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -34,7 +35,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" + " \"object\": {\n" + @@ -59,7 
+60,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("properties").endArray()
                 .endObject().endObject().string();
-        createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
     }
 
     public void testEmptyFieldsArrayMultiFields() throws Exception {
@@ -77,7 +78,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .string();
-        createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
     }
 
     public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception {
@@ -98,7 +99,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .string();
         try {
-            createIndex("test").mapperService().documentMapperParser().parse(mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
             fail("Expected MapperParsingException");
         } catch(MapperParsingException e) {
             assertThat(e.getMessage(), containsString("expected map for property [fields]"));
@@ -117,7 +118,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .string();
-        createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
     }
 
     public void testFieldsWithFilledArrayShouldThrowException() throws Exception {
@@ -134,7 +135,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .string();
         try {
-            createIndex("test").mapperService().documentMapperParser().parse(mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
             fail("Expected MapperParsingException");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), containsString("Expected map for property [fields]"));
@@ -160,6 +161,6 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .string();
-        createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
index 126c223f49d..99c89ff542c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.parent;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -34,7 +35,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
     public void testParentSetInDocNotAllowed() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
@@ -50,7 +51,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
                 .startObject("_parent").field("type", "p_type").endObject()
                 .endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
             .startObject()
@@ -67,7 +68,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_parent").field("type", "p_type").endObject()
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
             .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java
index 2582562c039..715eefca9e6 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.path;
 
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
@@ -34,7 +35,7 @@ import static org.hamcrest.Matchers.nullValue;
 public class PathMapperTests extends ESSingleNodeTestCase {
     public void testPathMapping() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/path/test-mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
 
         // test full name
         assertThat(docMapper.mappers().getMapper("first1"), nullValue());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
index ff0971034f8..1f6ee7cce3b 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java
@@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -49,7 +50,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
     public void testRoutingMapper() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
                 .startObject()
@@ -69,7 +70,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
         Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false));
         assertEquals(IndexOptions.NONE, docMapper.routingFieldMapper().fieldType().indexOptions());
     }
@@ -79,7 +80,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
                 .startObject("_routing").field("store", "no").field("index", "no").endObject()
                 .endObject().endObject().string();
         Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(enabledMapping);
+        DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping));
 
         XContentBuilder builder = JsonXContent.contentBuilder().startObject();
         enabledMapper.routingFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
@@ -102,7 +103,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
                 .startObject("_routing").field("path", "custom_routing").endObject()
                 .endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_routing", "routing_value").endObject();
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@@ -115,7 +116,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
     public void testIncludeInObjectBackcompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject();
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@@ -129,7 +130,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
 
     public void testIncludeInObjectNotAllowed() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
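The change that repeats through every file in this patch is the new DocumentMapperParser.parse signature: the caller now names the mapping type explicitly and hands the source over as a CompressedXContent instead of a raw JSON String. A minimal sketch of the migration, assuming the ESSingleNodeTestCase helpers these tests already use and a `parser`/`mapping` pair as in the hunks above:

    // old signature (removed): DocumentMapper docMapper = parser.parse(mapping);
    // new signature: the type is explicit and the source travels in compressed form
    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));

Every hunk that follows converts to this two-argument (or, with a default mapping, three-argument) shape; only the type name ("type", "tweet", "person", "type1") varies per test.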
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
index 0e8c74aee89..b04d3a64a25 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.simple;
 
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -64,10 +65,10 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
     public void testParseToJsonAndParse() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
         String builtMapping = docMapper.mappingSource().string();
         // reparse it
-        DocumentMapper builtDocMapper = parser.parse(builtMapping);
+        DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc();
         assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
@@ -76,7 +77,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 
     public void testSimpleParser() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
 
         assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
 
@@ -88,7 +89,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 
     public void testSimpleParserNoTypeNoId() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
         Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
         assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
@@ -98,12 +99,12 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
     public void testAttributes() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
 
         assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
 
         String builtMapping = docMapper.mappingSource().string();
-        DocumentMapper builtDocMapper = parser.parse(builtMapping);
+        DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1"));
     }
 
@@ -132,7 +133,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
                 .startObject("foo.bar").field("type", "string").endObject()
                 .endObject().endObject().string();
         try {
-            mapperParser.parse(mapping);
+            mapperParser.parse("type", new CompressedXContent(mapping));
             fail("Mapping parse should have failed");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
index 5a6521ee3c9..ea97aa6b8cf 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
@@ -50,14 +50,14 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
                 .endObject().endObject().string();
 
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper documentMapper = parser.parse(mapping);
+        DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
                 .field("field", "value")
                 .endObject().bytes());
 
         assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
 
-        documentMapper = parser.parse(mapping);
+        documentMapper = parser.parse("type", new CompressedXContent(mapping));
         doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject()
                 .field("field", "value")
                 .endObject().bytes());
@@ -74,7 +74,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
                 .build();
 
         DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
-        parser.parse(mapping); // no exception
+        parser.parse("type", new CompressedXContent(mapping)); // no exception
     }
 
     public void testIncludes() throws Exception {
@@ -82,7 +82,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
                 .startObject("_source").field("includes", new String[]{"path1*"}).endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
                 .startObject("path1").field("field1", "value1").endObject()
@@ -103,7 +103,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
                 .startObject("_source").field("excludes", new String[]{"path1*"}).endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
                 .startObject("path1").field("field1", "value1").endObject()
@@ -137,7 +137,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
             // all is well
         }
         try {
-            mapper = parser.parse(null, "{}", defaultMapping);
+            mapper = parser.parse(null, new CompressedXContent("{}"), defaultMapping);
             assertThat(mapper.type(), equalTo("my_type"));
             assertThat(mapper.sourceMapper().enabled(), equalTo(false));
             fail();
@@ -156,7 +156,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
                 .startObject("_source").field("enabled", true).endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping);
+        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", new CompressedXContent(mapping), defaultMapping);
         assertThat(mapper.type(), equalTo("my_type"));
         assertThat(mapper.sourceMapper().enabled(), equalTo(true));
     }
@@ -193,13 +193,13 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
     }
 
     void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
-        DocumentMapper docMapper = parser.parse(mapping1);
-        docMapper = parser.parse(docMapper.mappingSource().string());
+        DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
+        docMapper = parser.parse("type", docMapper.mappingSource());
         if (conflicts.length == 0) {
-            docMapper.merge(parser.parse(mapping2).mapping(), true, false);
+            docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
         } else {
             try {
-                docMapper.merge(parser.parse(mapping2).mapping(), true, false);
+                docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
                 fail();
             } catch (IllegalArgumentException e) {
                 for (String conflict : conflicts) {
@@ -264,27 +264,27 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
     public void testComplete() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        assertTrue(parser.parse(mapping).sourceMapper().isComplete());
+        assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_source").field("enabled", false).endObject()
                 .endObject().endObject().string();
-        assertFalse(parser.parse(mapping).sourceMapper().isComplete());
+        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_source").array("includes", "foo.*").endObject()
                 .endObject().endObject().string();
-        assertFalse(parser.parse(mapping).sourceMapper().isComplete());
+        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
 
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_source").array("excludes", "foo.*").endObject()
                 .endObject().endObject().string();
-        assertFalse(parser.parse(mapping).sourceMapper().isComplete());
+        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
     }
 
     public void testSourceObjectContainsExtraTokens() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             documentMapper.parse("test", "type", "1", new BytesArray("{}}")); // extra end object (invalid JSON)
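One detail of the assertConflicts conversion above is worth calling out: DocumentMapper.mappingSource() already returns a CompressedXContent, so re-parsing a mapper's own source no longer needs the String round-trip. A hedged sketch of the round-trip, using only names that appear in the hunks above:

    // first parse from raw JSON...
    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
    // ...then re-parse straight from the already-compressed mapping source
    docMapper = parser.parse("type", docMapper.mappingSource());

This is presumably why the patch can drop the old docMapper.mappingSource().string() indirection throughout.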
parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -122,7 +122,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception { String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string(); - mapper = parser.parse(mapping); + mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "2345") @@ -136,7 +136,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -154,7 +154,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -177,7 +177,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject() .endObject().endObject().string(); - defaultMapper = parser.parse(mapping); + defaultMapper = parser.parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -200,7 +200,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject() .endObject().endObject().string(); - defaultMapper = parser.parse(mapping); + defaultMapper = parser.parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -242,7 +242,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) { Map serializedMap = getSerializedMap(fieldName, mapper); assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer")); @@ -266,7 +266,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - mapper = parser.parse(mapping); + mapper = parser.parse("type", new CompressedXContent(mapping)); for (String fieldName : Arrays.asList("field1", "field2")) { Map serializedMap = getSerializedMap(fieldName, 
mapper); assertEquals(serializedMap.get("search_quote_analyzer"), "simple"); @@ -318,7 +318,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -389,7 +389,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -443,7 +443,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -492,7 +492,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false); + defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), false, false); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -507,7 +507,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); try { - defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false); + defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), true, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("different [omit_norms]")); @@ -531,7 +531,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .field("analyzer", "standard") .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("Mapping definition should fail with the position_offset_gap setting"); }catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]"); @@ -554,8 +554,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .field("type", "string") .field("position_offset_gap", 10) .endObject().endObject().endObject().endObject().string(); - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"position_increment_gap\":10")); + assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"position_increment_gap\":10")); } } diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index ce4e2cada7c..01d4c6a1234 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -75,7 +75,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -90,7 +90,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", "yes").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -108,7 +108,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { for (String mapping : Arrays.asList( XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(), XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) { - DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); @@ -130,7 +130,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .field("doc_values", true) .endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(true)); assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false)); assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions()); @@ -144,12 +144,12 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", true).endObject() .endObject().endObject().string(); 
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper enabledMapper = parser.parse(enabledMapping); + DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse(disabledMapping); + DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); enabledMapper.merge(disabledMapper.mapping(), false, false); @@ -161,7 +161,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("store", "yes").field("index", "no").endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(enabledMapping); + DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping)); XContentBuilder builder = JsonXContent.contentBuilder().startObject(); enabledMapper.timestampFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); @@ -192,7 +192,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -218,7 +218,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -246,7 +246,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .field("foo", "bar") .endObject(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -271,7 +271,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .field("foo", "bar") .endObject(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -298,7 +298,7 @@ public class 
TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -326,7 +326,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -349,7 +349,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); @@ -371,7 +371,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -394,7 +394,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); @@ -412,7 +412,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false")); @@ -432,7 +432,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -506,14 +506,14 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() .endObject().endObject().string(); - docMapper.merge(parser.parse(mapping).mapping(), false, false); + docMapper.merge(parser.parse("type", new CompressedXContent(mapping)).mapping(), false, false); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); } @@ -526,8 +526,8 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); - docMapper = parser.parse(docMapper.mappingSource().string()); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + docMapper = parser.parse("type", docMapper.mappingSource()); assertThat(docMapper.mappingSource().string(), equalTo(mapping)); } @@ -546,9 +546,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized(); - docMapper = parser.parse(docMapper.mappingSource().string()); + docMapper = parser.parse("type", docMapper.mappingSource()); assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized())); } @@ -674,13 +674,13 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException { - DocumentMapper docMapper = parser.parse(mapping1); - docMapper = parser.parse(docMapper.mappingSource().string()); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1)); + docMapper = parser.parse("type", docMapper.mappingSource()); if (conflict == null) { - docMapper.merge(parser.parse(mapping2).mapping(), true, false); + docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); } else { try { - docMapper.merge(parser.parse(mapping2).mapping(), true, false); + 
docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString(conflict)); @@ -735,9 +735,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { void assertDocValuesSerialization(String mapping) throws Exception { DocumentMapperParser parser = createIndex("test_doc_values", BWC_SETTINGS).mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues(); - docMapper = parser.parse(docMapper.mappingSource().string()); + docMapper = parser.parse("type", docMapper.mappingSource()); assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues)); assertAcked(client().admin().indices().prepareDelete("test_doc_values")); } @@ -746,7 +746,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject(); @@ -762,7 +762,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() .endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject(); @@ -779,7 +779,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() @@ -794,7 +794,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = 
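The assertConflict/assertConflicts helpers converted above share one merge pattern; judging from the calls in these hunks, the first boolean appears to make the merge a simulation-only conflict check, while false applies the update for real. A hedged sketch under that assumption, using only calls that appear in the hunks:

    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
    try {
        // simulate first: conflicts surface as IllegalArgumentException
        docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
    } catch (IllegalArgumentException e) {
        // e.getMessage() names the conflicting setting, e.g. "different [omit_norms]"
    }
    // apply once no conflicts are reported
    docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false, false);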
createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject(); @@ -815,7 +815,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // // test with older versions Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build(); - DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); @@ -827,7 +827,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // // test with 2.x - DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse(mapping); + DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData(); // this works with 2.x diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index 444d692079a..c43a7f1ddc5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -50,7 +50,7 @@ import static org.hamcrest.Matchers.notNullValue; public class TTLMappingTests extends ESSingleNodeTestCase { public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -65,7 +65,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", "yes").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -80,7 +80,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testDefaultValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); 
assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled)); assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored())); assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions())); @@ -93,7 +93,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(true)); assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(true)); // store was never serialized, so it was always lost @@ -112,8 +112,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl); - DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl); + DocumentMapper mapperWithoutTtl = parser.parse("type", new CompressedXContent(mappingWithoutTtl)); + DocumentMapper mapperWithTtl = parser.parse("type", new CompressedXContent(mappingWithTtl)); mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false); @@ -136,8 +136,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse(mappingWithTtl); - DocumentMapper updatedMapper = parser.parse(updatedMapping); + DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl)); + DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(updatedMapping)); initialMapper.merge(updatedMapper.mapping(), true, false); @@ -148,8 +148,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mappingWithTtl = getMappingWithTtlEnabled().string(); String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse(mappingWithTtl); - DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled); + DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl)); + DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(mappingWithTtlDisabled)); try { initialMapper.merge(updatedMapper.mapping(), true, false); @@ -278,7 +278,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .startObject("_ttl").field("enabled", true).endObject() .endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); XContentBuilder doc = 
XContentFactory.jsonBuilder().startObject().field("_ttl", "2d").endObject(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -294,7 +294,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java index 26d710b137f..e5d6431492d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.typelevels; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -33,7 +34,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevel() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -51,7 +52,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevel() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -69,7 +70,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevelWithFieldTypeAsValue() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -89,7 +90,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevelWithFieldTypeAsValue() throws Exception { String defaultMapping = 
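The ParseDocumentTypeLevelsTests conversions that follow all exercise one question: whether a document may repeat its mapping type as a top-level wrapper object. A hedged sketch of the two document shapes being compared (the field name here is illustrative, not taken from the patch):

    // document without a type level
    XContentBuilder noLevel = XContentFactory.jsonBuilder()
            .startObject().field("field", "value").endObject();
    // the same document wrapped in its type name
    XContentBuilder typeLevel = XContentFactory.jsonBuilder()
            .startObject().startObject("type").field("field", "value").endObject().endObject();
    // the tests below feed both shapes to defaultMapper.parse("test", "type", "1", ...)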
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java
index 26d710b137f..e5d6431492d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.typelevels;
 
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
@@ -33,7 +34,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testNoLevel() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -51,7 +52,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testTypeLevel() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
@@ -69,7 +70,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testNoLevelWithFieldTypeAsValue() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -89,7 +90,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testTypeLevelWithFieldTypeAsValue() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
@@ -109,7 +110,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testNoLevelWithFieldTypeAsObject() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -129,7 +130,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testTypeLevelWithFieldTypeAsObject() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
@@ -149,7 +150,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
@@ -169,7 +170,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
@@ -189,7 +190,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -210,7 +211,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
     public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception {
         String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java
index d99efee6824..1d849d50932 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.typelevels;
 
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
@@ -34,11 +35,7 @@ public class ParseMappingTypeLevelTests extends ESSingleNodeTestCase {
                 .endObject().endObject().string();
 
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper mapper = parser.parse("type", mapping);
-        assertThat(mapper.type(), equalTo("type"));
-        assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
-
-        mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertThat(mapper.type(), equalTo("type"));
         assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
     }
diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
index 965916284a3..5b2a368661a 100644
--- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
+++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
@@ -30,6 +30,7 @@ import org.apache.lucene.search.similarities.LMDirichletSimilarity;
 import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity;
 import org.apache.lucene.search.similarities.LambdaTTF;
 import org.apache.lucene.search.similarities.NormalizationH2;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -60,7 +61,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.discount_overlaps", false)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DefaultSimilarityProvider.class));
 
         DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
@@ -81,7 +82,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.discount_overlaps", false)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(BM25SimilarityProvider.class));
 
         BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
@@ -105,7 +106,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.normalization.h2.c", 3f)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DFRSimilarityProvider.class));
 
         DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
@@ -130,7 +131,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.normalization.h2.c", 3f)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(IBSimilarityProvider.class));
 
         IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
@@ -152,7 +153,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.mu", 3000f)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMDirichletSimilarityProvider.class));
 
         LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
@@ -171,7 +172,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
                 .put("index.similarity.my_similarity.lambda", 0.7f)
                 .build();
         IndexService indexService = createIndex("foo", indexSettings);
-        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class));
 
         LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
index 15e449f786d..1dc7d20c7b7 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.suggest.document.ContextSuggestField;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -58,7 +59,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder()
@@ -96,7 +97,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder()
@@ -129,7 +130,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder()
@@ -164,7 +165,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new
CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); XContentBuilder builder = jsonBuilder() diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 51cd83c3c5d..9e5dd86fea6 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.GeoHashUtils; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -58,7 +59,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -97,7 +98,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -133,7 +134,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -176,7 +177,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); XContentBuilder builder = jsonBuilder() diff --git 
a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java index f93785ed14a..7b93dbc3155 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.mapper.attachments; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; @@ -43,7 +44,7 @@ public class DateAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/date/date-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); // Our mapping should be kept as a String assertThat(docMapper.mappers().getMapper("file.date"), instanceOf(StringFieldMapper.class)); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java index eda6f7669d7..f550193b847 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; @@ -49,7 +50,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html"); byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf"); @@ -78,7 +79,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { public void testMultipleDocsEncryptedFirst() throws IOException { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html 
= copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html"); byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf"); @@ -111,7 +112,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html"); byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf"); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java index 868ecb3ae55..190231eb95e 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.mapper.attachments; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.MapperTestUtils; @@ -53,7 +54,7 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa Settings.settingsBuilder().put("index.mapping.attachment.detect_language", langDetect).build(), getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/language/language-mapping.json"); - docMapper = mapperParser.parse(mapping); + docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); assertThat(docMapper.mappers().getMapper("file.language"), instanceOf(StringFieldMapper.class)); } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index 42d13fce58a..0a0f69edcfd 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; @@ -52,7 +53,7 @@ public class MetadataMapperTests extends AttachmentUnitTestCase { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = 
copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/metadata/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/" + filename); BytesReference json = jsonBuilder() diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index 266c7cdd335..9e756796352 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.Base64; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MapperTestUtils; @@ -62,7 +63,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); assertThat(docMapper.mappers().getMapper("file.content"), instanceOf(StringFieldMapper.class)); @@ -98,7 +99,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); - DocumentMapper documentMapper = mapperService.documentMapperParser().parse(mapping); + DocumentMapper documentMapper = mapperService.documentMapperParser().parse("person", new CompressedXContent(mapping)); ParsedDocument doc = documentMapper.parse("person", "person", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index c855b45e846..7618c4de5a1 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -46,7 +46,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = 
copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); @@ -58,7 +58,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = mapperParser.parse(builtMapping); + docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("file", html).endObject().bytes(); @@ -74,7 +74,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(), getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); @@ -89,7 +89,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappingsWithAllFields() throws Exception { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); @@ -101,7 +101,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = mapperParser.parse(builtMapping); + docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("file", html).endObject().bytes(); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java index fcd430d0fbc..137980c6b36 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolConfig; import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; @@ -92,7 +93,7 @@ public class StandaloneRunner extends CliTool { DocumentMapperParser mapperParser = 
MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); // use CWD b/c it won't be used String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json"); - docMapper = mapperParser.parse(mapping); + docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); } @Override diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java index 8743ed75934..94beb54d281 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.mapper.attachments; import org.apache.tika.io.IOUtils; import org.apache.tika.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; @@ -58,7 +59,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json"); - docMapper = mapperParser.parse(mapping); + docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); } /** diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index da65210f6d9..603fcbbf820 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -59,7 +60,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field") .field("type", "murmur3") .endObject().endObject().endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes()); IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); assertNotNull(fields); @@ -76,7 +77,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("doc_values", false) .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new 
CompressedXContent(mapping)); fail("expected a mapper parsing exception"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified")); @@ -89,7 +90,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("doc_values", true) .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified")); @@ -103,7 +104,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("index", "not_analyzed") .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("Setting [index] cannot be modified")); @@ -116,7 +117,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("index", "no") .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); } catch (MapperParsingException e) { assertTrue(e.getMessage().contains("Setting [index] cannot be modified")); @@ -134,7 +135,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("doc_values", false) .endObject().endObject().endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); assertFalse(mapper.fieldType().hasDocValues()); } @@ -150,7 +151,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("index", "not_analyzed") .endObject().endObject().endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); assertEquals(IndexOptions.DOCS, mapper.fieldType().indexOptions()); } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 9661de381c5..f9dcee1efe9 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.size; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -60,7 +61,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper 
docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -85,7 +86,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.analysisService(), indexService.similarityService(), mapperRegistry); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -102,7 +103,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -117,7 +118,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { public void testSizeNotSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -133,12 +134,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = parser.parse(enabledMapping); + DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse(disabledMapping); + DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); enabledMapper.merge(disabledMapper.mapping(), false, false); assertThat(enabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); From eb64a81d0593c2c3732f9a108b353ee24c9d2587 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 14:39:58 +0100 Subject: [PATCH 187/322] Remove `index.merge.scheduler.notify_on_failure` and default to `true` This setting was undocumented and should not be set by any user. We should fail the shard instead. 
Closes #15570 --- .../index/engine/InternalEngine.java | 24 +++++++++---------- .../index/shard/MergeSchedulerConfig.java | 20 ++++------------ 2 files changed, 16 insertions(+), 28 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 80d0b5b3006..7911d219684 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1129,20 +1129,18 @@ public class InternalEngine extends Engine { @Override protected void handleMergeException(final Directory dir, final Throwable exc) { logger.error("failed to merge", exc); - if (config().getMergeSchedulerConfig().isNotifyOnMergeFailure()) { - engineConfig.getThreadPool().generic().execute(new AbstractRunnable() { - @Override - public void onFailure(Throwable t) { - logger.debug("merge failure action rejected", t); - } + engineConfig.getThreadPool().generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Throwable t) { + logger.debug("merge failure action rejected", t); + } - @Override - protected void doRun() throws Exception { - MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir); - failEngine("merge failed", e); - } - }); - } + @Override + protected void doRun() throws Exception { + MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir); + failEngine("merge failed", e); + } + }); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java index c329722a135..a90bf2d7d91 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java @@ -29,19 +29,19 @@ import org.elasticsearch.index.IndexSettings; * merge operations once they are needed (according to the merge policy). Merges * run in separate threads, and when the maximum number of threads is reached, * further merges will wait until a merge thread becomes available. - * + * *

      The merge scheduler supports the following dynamic settings: - * + * *
      • index.merge.scheduler.max_thread_count: - * + * * The maximum number of threads that may be merging at once. Defaults to * Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors() / 2)) * which works well for a good solid-state-disk (SSD). If your index is on * spinning platter drives instead, decrease this to 1. - * + *
      • index.merge.scheduler.auto_throttle: - * + * * If this is true (the default), then the merge scheduler will rate-limit IO * (writes) for merges to an adaptive value depending on how many merges are * requested over time. An application with a low indexing rate that @@ -55,19 +55,16 @@ public final class MergeSchedulerConfig { public static final String MAX_THREAD_COUNT = "index.merge.scheduler.max_thread_count"; public static final String MAX_MERGE_COUNT = "index.merge.scheduler.max_merge_count"; public static final String AUTO_THROTTLE = "index.merge.scheduler.auto_throttle"; - public static final String NOTIFY_ON_MERGE_FAILURE = "index.merge.scheduler.notify_on_failure"; // why would we not wanna do this? private volatile boolean autoThrottle; private volatile int maxThreadCount; private volatile int maxMergeCount; - private final boolean notifyOnMergeFailure; public MergeSchedulerConfig(IndexSettings indexSettings) { final Settings settings = indexSettings.getSettings(); maxThreadCount = settings.getAsInt(MAX_THREAD_COUNT, Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(settings) / 2))); maxMergeCount = settings.getAsInt(MAX_MERGE_COUNT, maxThreadCount + 5); this.autoThrottle = settings.getAsBoolean(AUTO_THROTTLE, true); - notifyOnMergeFailure = settings.getAsBoolean(NOTIFY_ON_MERGE_FAILURE, true); } /** @@ -114,11 +111,4 @@ public final class MergeSchedulerConfig { public void setMaxMergeCount(int maxMergeCount) { this.maxMergeCount = maxMergeCount; } - - /** - * Returns true iff we fail the engine on a merge failure. Default is true - */ - public boolean isNotifyOnMergeFailure() { - return notifyOnMergeFailure; - } } From afc1cc19af8d6333587194857f414eee6377ef8b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 15:15:00 +0100 Subject: [PATCH 188/322] Simplify translog-based flush settings This commit removes `index.translog.flush_threshold_ops` and `index.translog.disable_flush` in favor of `index.translog.flush_threshold_size`. The number of operations is meaningless by itself and can easily be turned into a size value with knowledge of the data. Disabling flushes is only useful in tests, where the size threshold can simply be set to a very large value; users who really need this behavior can achieve the same effect by applying a very high value such as `1PB`.
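For illustration, here is a minimal sketch of the replacement idiom. It mirrors the `disableTranslogFlush` helper added to `CorruptedTranslogIT` further down in this patch and assumes the integration-test `client()` helper; the index name `test` is made up for the example:

```java
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.IndexShard;

// Effectively disable translog-based flushing: with flush_threshold_ops and
// disable_flush gone, a size threshold that no shard will realistically reach
// serves the same purpose.
Settings noFlush = Settings.builder()
        .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB))
        .build();
client().admin().indices().prepareUpdateSettings("test").setSettings(noFlush).get();
```

Re-enabling flushes is simply another settings update that restores the regular `512mb` default.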
--- .../elasticsearch/cluster/ClusterModule.java | 2 -- .../elasticsearch/index/shard/IndexShard.java | 32 ++++--------------- .../org/elasticsearch/get/GetActionIT.java | 12 +++---- .../index/IndexWithShadowReplicasIT.java | 4 ++- .../index/shard/IndexShardTests.java | 7 ++-- .../index/store/CorruptedFileIT.java | 9 +++--- .../index/store/CorruptedTranslogIT.java | 15 +++++++++ .../elasticsearch/indices/flush/FlushIT.java | 6 ++-- .../reference/index-modules/translog.asciidoc | 5 --- .../elasticsearch/test/ESIntegTestCase.java | 17 +--------- 10 files changed, 44 insertions(+), 65 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index facddab3fb9..6f7eb20874f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -182,9 +182,7 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2); registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE); registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, Validator.EMPTY); - registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, Validator.INTEGER); registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE); - registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, Validator.EMPTY); registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY); registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index a67ca309bc9..b0d19a2c0b2 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -188,18 +188,14 @@ public class IndexShard extends AbstractIndexShardComponent { private final ShardEventListener shardEventListener = new ShardEventListener(); private volatile boolean flushOnClose = true; - private volatile int flushThresholdOperations; private volatile ByteSizeValue flushThresholdSize; - private volatile boolean disableFlush; /** * Index setting to control if a flush is executed before engine is closed * This setting is realtime updateable. */ public static final String INDEX_FLUSH_ON_CLOSE = "index.flush_on_close"; - public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS = "index.translog.flush_threshold_ops"; public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size"; - public static final String INDEX_TRANSLOG_DISABLE_FLUSH = "index.translog.disable_flush"; /** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). 
*/ public static final String INDEX_SHARD_INACTIVE_TIME_SETTING = "index.shard.inactive_time"; private static final String INDICES_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time"; @@ -270,9 +266,7 @@ public class IndexShard extends AbstractIndexShardComponent { } this.engineConfig = newEngineConfig(translogConfig, cachingPolicy); - this.flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, settings.getAsInt("index.translog.flush_threshold", Integer.MAX_VALUE)); this.flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); - this.disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); this.provider = provider; this.searcherWrapper = indexSearcherWrapper; @@ -1136,15 +1130,13 @@ public class IndexShard extends AbstractIndexShardComponent { * Otherwise false. */ boolean shouldFlush() { - if (disableFlush == false) { - Engine engine = getEngineOrNull(); - if (engine != null) { - try { - Translog translog = engine.getTranslog(); - return translog.totalOperations() > flushThresholdOperations || translog.sizeInBytes() > flushThresholdSize.bytes(); - } catch (AlreadyClosedException | EngineClosedException ex) { - // that's fine we are already close - no need to flush - } + Engine engine = getEngineOrNull(); + if (engine != null) { + try { + Translog translog = engine.getTranslog(); + return translog.sizeInBytes() > flushThresholdSize.bytes(); + } catch (AlreadyClosedException | EngineClosedException ex) { + // that's fine we are already close - no need to flush } } return false; @@ -1156,21 +1148,11 @@ public class IndexShard extends AbstractIndexShardComponent { if (state() == IndexShardState.CLOSED) { // no need to update anything if we are closed return; } - int flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, this.flushThresholdOperations); - if (flushThresholdOperations != this.flushThresholdOperations) { - logger.info("updating flush_threshold_ops from [{}] to [{}]", this.flushThresholdOperations, flushThresholdOperations); - this.flushThresholdOperations = flushThresholdOperations; - } ByteSizeValue flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, this.flushThresholdSize); if (!flushThresholdSize.equals(this.flushThresholdSize)) { logger.info("updating flush_threshold_size from [{}] to [{}]", this.flushThresholdSize, flushThresholdSize); this.flushThresholdSize = flushThresholdSize; } - boolean disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, this.disableFlush); - if (disableFlush != this.disableFlush) { - logger.info("updating disable_flush from [{}] to [{}]", this.disableFlush, disableFlush); - this.disableFlush = disableFlush; - } final EngineConfig config = engineConfig; final boolean flushOnClose = settings.getAsBoolean(INDEX_FLUSH_ON_CLOSE, this.flushOnClose); diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index f41f4adc74e..6d04836a810 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -874,7 +874,7 @@ public class GetActionIT extends ESIntegTestCase { public void testUngeneratedFieldsThatAreNeverStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" 
+ + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -913,7 +913,7 @@ public class GetActionIT extends ESIntegTestCase { public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -983,7 +983,7 @@ public class GetActionIT extends ESIntegTestCase { String storedString = stored ? "yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + @@ -1008,7 +1008,7 @@ public class GetActionIT extends ESIntegTestCase { public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -1074,7 +1074,7 @@ public class GetActionIT extends ESIntegTestCase { String storedString = stored ? "yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + @@ -1126,7 +1126,7 @@ public class GetActionIT extends ESIntegTestCase { String storedString = stored ? 
"yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 9f22d44a214..8333080dac5 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -34,6 +34,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShadowIndexShard; import org.elasticsearch.index.translog.TranslogStats; @@ -179,7 +181,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Settings idxSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 7b5e4642832..09f3440670c 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -697,7 +697,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); @@ -713,8 +713,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { shard.getEngine().getTranslog().sync(); long size = shard.getEngine().getTranslog().sizeInBytes(); logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1000) - .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) + 
client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); @@ -732,7 +731,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index bee540ba9ea..496bdc21b15 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; @@ -142,7 +143,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -247,7 +248,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -473,7 +474,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) 
.put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -528,7 +529,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, cluster().numDataNodes() - 1) .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 6194183c983..f34c83da64b 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -29,6 +29,9 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.monitor.fs.FsInfo; @@ -167,4 +170,16 @@ public class CorruptedTranslogIT extends ESIntegTestCase { } assertThat("no file corrupted", fileToCorrupt, notNullValue()); } + + /** Disables translog flushing for the specified index */ + private static void disableTranslogFlush(String index) { + Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).build(); + client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); + } + + /** Enables translog flushing for the specified index */ + private static void enableTranslogFlush(String index) { + Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)).build(); + client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); + } } diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 4fdd76dae8d..7acc289e209 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -28,7 +28,10 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -144,14 +147,13 @@ public class FlushIT extends ESIntegTestCase { } } - @TestLogging("indices:TRACE") public void testSyncedFlushWithConcurrentIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); createIndex("test"); client().admin().indices().prepareUpdateSettings("test").setSettings( - Settings.builder().put("index.translog.disable_flush", true).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) + Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) .get(); ensureGreen(); final AtomicBoolean stop = new AtomicBoolean(false); diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index ad704299529..b8ada9a55a3 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -20,7 +20,6 @@ replaying its operations take a considerable amount of time during recovery. It is also exposed through an API, though its rarely needed to be performed manually. - [float] === Flush settings @@ -31,10 +30,6 @@ control how often the in-memory buffer is flushed to disk: Once the translog hits this size, a flush will happen. Defaults to `512mb`. -`index.translog.flush_threshold_ops`:: - -After how many operations to flush. Defaults to `unlimited`. 
- [float] === Translog settings diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 5d08f78a90e..f2c7d7c4772 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -511,14 +511,11 @@ public abstract class ESIntegTestCase extends ESTestCase { } private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) { - if (random.nextBoolean()) { - builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, RandomInts.randomIntBetween(random, 1, 10000)); - } if (random.nextBoolean()) { builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); } if (random.nextBoolean()) { - builder.put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, random.nextBoolean()); + builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { builder.put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durabilty.values())); @@ -1452,18 +1449,6 @@ public abstract class ESIntegTestCase extends ESTestCase { private AtomicInteger dummmyDocIdGenerator = new AtomicInteger(); - /** Disables translog flushing for the specified index */ - public static void disableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true).build(); - client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); - } - - /** Enables translog flushing for the specified index */ - public static void enableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, false).build(); - client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); - } - /** Disables an index block for the specified index */ public static void disableIndexBlock(String index, String block) { Settings settings = Settings.builder().put(block, false).build(); From 613e8a9782cce5b31d55309a17f085b5bb24574b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 15:20:32 +0100 Subject: [PATCH 189/322] add note to migration guide --- docs/reference/migration/migrate_3_0.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 0179e289b99..83107cbfa9f 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -200,6 +200,11 @@ If you are using any of these settings please take the time and review their pur _expert settings_ and should only be used if absolutely necessary. If you have set any of the above setting as persistent cluster settings please use the settings update API and set their superseded keys accordingly. +==== Translog settings + +The `index.translog.flush_threshold_ops` setting is not supported anymore. In order to control flushes based on the transaction log +growth use `index.translog.flush_threshold_size` instead. 
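A minimal sketch of the replacement setting, using the same Java-client pattern as the test helpers elsewhere in this series (the index name "my_index" and the 512mb threshold are illustrative, not prescribed values):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.IndexShard;

// Flush once the translog exceeds 512mb instead of after a fixed number of
// operations; assumes an ESIntegTestCase-style client() helper is in scope.
Settings settings = Settings.builder()
        .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB))
        .build();
client().admin().indices().prepareUpdateSettings("my_index").setSettings(settings).get();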
+ [[breaking_30_mapping_changes]] === Mapping changes From 7fe2eddfec6fbed8b2a5397465ad9111eb4f85cb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 15:26:56 +0100 Subject: [PATCH 190/322] fix test --- .../java/org/elasticsearch/index/shard/IndexShardTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 09f3440670c..e1c7cad5607 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -697,7 +697,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.BYTES)).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); @@ -731,7 +731,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.BYTES)).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); From 0cdbcdab64ee37229c709c2f301c8065a5da56ec Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 15:48:16 +0100 Subject: [PATCH 191/322] added 2.3.0-SNAPSHOT as a Version constant --- core/src/main/java/org/elasticsearch/Version.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index b8ba0a411a9..ac2575597e8 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -279,6 +279,8 @@ public class Version { public static final Version V_2_1_2 = new Version(V_2_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_2_0_ID = 2020099; public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); + public static final int V_2_3_0_ID = 2030099; + public static final Version V_2_3_0 = new Version(V_2_3_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final int V_3_0_0_ID = 3000099; public static 
final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final Version CURRENT = V_3_0_0; @@ -295,6 +297,8 @@ public class Version { switch (id) { case V_3_0_0_ID: return V_3_0_0; + case V_2_3_0_ID: + return V_2_3_0; case V_2_2_0_ID: return V_2_2_0; case V_2_1_2_ID: From 4d32cc0b9f6ac41dca4674bbbadbf1e677e5d305 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 21 Dec 2015 14:29:48 +0100 Subject: [PATCH 192/322] Queries.calculateMinShouldMatch returns the number of "min should match" clauses that the user wanted even if the number of optional clauses is smaller than the provided number. In such case the query now returns no result. Closes #15521 --- .../common/lucene/search/Queries.java | 4 +- .../index/query/BoolQueryBuilder.java | 3 +- .../index/query/BoolQueryBuilderTests.java | 18 +++++++ .../search/query/SearchQueryIT.java | 53 +++++++++++++++++++ 4 files changed, 73 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 5ecd22eab1b..73c3fc9400d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -179,8 +179,6 @@ public class Queries { result = calc < 0 ? result + calc : calc; } - return (optionalClauseCount < result ? - optionalClauseCount : (result < 0 ? 0 : result)); - + return result < 0 ? 0 : result; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index b8170a3195a..69ee2a81061 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -273,8 +273,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { return new MatchAllDocsQuery(); } final String minimumShouldMatch; - if (context.isFilter() && this.minimumShouldMatch == null) { - //will be applied for real only if there are should clauses + if (context.isFilter() && this.minimumShouldMatch == null && shouldClauses.size() > 0) { minimumShouldMatch = "1"; } else { minimumShouldMatch = this.minimumShouldMatch; diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index b1a4f7ccde1..1c407fbaa0e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -254,6 +254,24 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase # optional clauses returns no docs. 
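+ // The minimum_should_match value is no longer capped at the number of
+ // optional clauses; if it exceeds them, the query simply matches nothing.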
+ multiMatchQuery = multiMatchQuery("value1 value2 value3", "field1", "field2"); + multiMatchQuery.minimumShouldMatch("4"); + searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + assertHitCount(searchResponse, 0l); + } + + public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException { + createIndex("test"); + client().prepareIndex("test", "type1", "1").setSource("field1", new String[]{"value1", "value2", "value3"}).get(); + client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + refresh(); + + BoolQueryBuilder boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)); + SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("1")); + + boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(1)) + // Only one should clause is defined, returns no docs. + .minimumNumberShouldMatch(2); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 0l); + + boolQuery = boolQuery() + .should(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)) + .minimumNumberShouldMatch(1); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("1")); + + boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .must(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 0l); } public void testFuzzyQueryString() { From 81fd2169cf9f394c7be2a853caae5a54c0c75b18 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 15 Dec 2015 12:07:07 +0100 Subject: [PATCH 193/322] Renames "default" similarity into "classic". Replaces deprecated DefaultSimilarity by ClassicSimilarity. 
Fixes #15102 --- .../lucene/queries/BlendedTermQuery.java | 2 +- .../action/termvectors/TermVectorsFilter.java | 4 +- .../lucene/search/MoreLikeThisQuery.java | 4 +- .../common/lucene/search/XMoreLikeThis.java | 6 +- .../index/mapper/core/TypeParsers.java | 20 ++- ...er.java => ClassicSimilarityProvider.java} | 14 +- .../index/similarity/SimilarityService.java | 6 +- .../lucene/queries/BlendedTermQueryTests.java | 4 +- .../index/similarity/SimilarityTests.java | 159 ++++++++++++------ .../similarity/SimilarityIT.java | 4 +- .../index-modules/similarity.asciidoc | 8 +- .../mapping/params/similarity.asciidoc | 4 +- docs/reference/mapping/types/string.asciidoc | 2 +- docs/reference/migration/migrate_3_0.asciidoc | 4 + 14 files changed, 155 insertions(+), 86 deletions(-) rename core/src/main/java/org/elasticsearch/index/similarity/{DefaultSimilarityProvider.java => ClassicSimilarityProvider.java} (75%) diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 0d78c952765..645929d3992 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -54,7 +54,7 @@ import java.util.Objects; * While aggregating the total term frequency is trivial since it * can be summed up not every {@link org.apache.lucene.search.similarities.Similarity} * makes use of this statistic. The document frequency which is used in the - * {@link org.apache.lucene.search.similarities.DefaultSimilarity} + * {@link org.apache.lucene.search.similarities.ClassicSimilarity} * can only be estimated as an lower-bound since it is a document based statistic. For * the document frequency the maximum frequency across all fields per term is used * which is the minimum number of documents the terms occurs in. 
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java index e6904ee5ede..cdeed093eed 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.TermStatistics; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; @@ -67,7 +67,7 @@ public class TermVectorsFilter { this.dfs = dfs; this.scoreTerms = new HashMap<>(); - this.similarity = new DefaultSimilarity(); + this.similarity = new ClassicSimilarity(); } public void setSettings(TermVectorsRequest.FilterSettings settings) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index 4e2aa5e7eef..fbe0c28e341 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -30,7 +30,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; @@ -138,7 +138,7 @@ public class MoreLikeThisQuery extends Query { if (rewritten != this) { return rewritten; } - XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? new DefaultSimilarity() : similarity); + XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? new ClassicSimilarity() : similarity); mlt.setFieldNames(moreLikeFields); mlt.setAnalyzer(analyzer); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index c223ee46a2f..16378523b5c 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -52,7 +52,7 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; @@ -304,7 +304,7 @@ public final class XMoreLikeThis { /** * For idf() calculations. */ - private TFIDFSimilarity similarity;// = new DefaultSimilarity(); + private TFIDFSimilarity similarity;// = new ClassicSimilarity(); /** * IndexReader to use @@ -346,7 +346,7 @@ public final class XMoreLikeThis { * Constructor requiring an IndexReader. 
*/ public XMoreLikeThis(IndexReader ir) { - this(ir, new DefaultSimilarity()); + this(ir, new ClassicSimilarity()); } public XMoreLikeThis(IndexReader ir, TFIDFSimilarity sim) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 00d39791c9c..3ebb4e137e1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -35,6 +35,8 @@ import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.similarity.SimilarityProvider; +import org.elasticsearch.index.similarity.SimilarityService; import java.util.ArrayList; import java.util.Collections; @@ -162,7 +164,8 @@ public class TypeParsers { builder.omitNorms(nodeBooleanValue(propNode)); iterator.remove(); } else if (propName.equals("similarity")) { - builder.similarity(parserContext.getSimilarity(propNode.toString())); + SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); + builder.similarity(similarityProvider); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); @@ -296,7 +299,8 @@ public class TypeParsers { // ignore for old indexes iterator.remove(); } else if (propName.equals("similarity")) { - builder.similarity(parserContext.getSimilarity(propNode.toString())); + SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); + builder.similarity(similarityProvider); iterator.remove(); } else if (propName.equals("fielddata")) { final Settings settings = Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build(); @@ -455,4 +459,16 @@ public class TypeParsers { builder.copyTo(copyToBuilder.build()); } + private static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) { + if (parserContext.indexVersionCreated().before(Version.V_3_0_0) && + "default".equals(value) && parserContext.getSimilarity(value) == null) { + // "default" similarity has been renamed into "classic" in 3.x. 
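+ // Map the legacy name onto the built-in "classic" provider so mappings from
+ // pre-3.0 indices keep parsing; for newer indices the unknown name falls
+ // through and fails below with a MapperParsingException.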
+ value = SimilarityService.DEFAULT_SIMILARITY; + } + SimilarityProvider similarityProvider = parserContext.getSimilarity(value); + if (similarityProvider == null) { + throw new MapperParsingException("Unknown Similarity type [" + value + "] for [" + name + "]"); + } + return similarityProvider; + } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java similarity index 75% rename from core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java rename to core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java index 3acbd9821af..f9a6ff2f5fb 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java @@ -19,23 +19,23 @@ package org.elasticsearch.index.similarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.elasticsearch.common.settings.Settings; /** - * {@link SimilarityProvider} for {@link DefaultSimilarity}. + * {@link SimilarityProvider} for {@link ClassicSimilarity}. *

 * <p>
 * Configuration options available:
 * <ul>
 *     <li>discount_overlaps</li>
 * </ul>
        - * @see DefaultSimilarity For more information about configuration + * @see ClassicSimilarity For more information about configuration */ -public class DefaultSimilarityProvider extends AbstractSimilarityProvider { +public class ClassicSimilarityProvider extends AbstractSimilarityProvider { - private final DefaultSimilarity similarity = new DefaultSimilarity(); + private final ClassicSimilarity similarity = new ClassicSimilarity(); - public DefaultSimilarityProvider(String name, Settings settings) { + public ClassicSimilarityProvider(String name, Settings settings) { super(name); boolean discountOverlaps = settings.getAsBoolean("discount_overlaps", true); this.similarity.setDiscountOverlaps(discountOverlaps); @@ -45,7 +45,7 @@ public class DefaultSimilarityProvider extends AbstractSimilarityProvider { * {@inheritDoc} */ @Override - public DefaultSimilarity get() { + public ClassicSimilarity get() { return similarity; } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 1d08683f47b..7e7706281a4 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -35,7 +35,7 @@ import java.util.function.BiFunction; public final class SimilarityService extends AbstractIndexComponent { - public final static String DEFAULT_SIMILARITY = "default"; + public final static String DEFAULT_SIMILARITY = "classic"; private final Similarity defaultSimilarity; private final Similarity baseSimilarity; private final Map similarities; @@ -44,9 +44,9 @@ public final class SimilarityService extends AbstractIndexComponent { static { Map> defaults = new HashMap<>(); Map> buildIn = new HashMap<>(); - defaults.put("default", DefaultSimilarityProvider::new); + defaults.put("classic", ClassicSimilarityProvider::new); defaults.put("BM25", BM25SimilarityProvider::new); - buildIn.put("default", DefaultSimilarityProvider::new); + buildIn.put("classic", ClassicSimilarityProvider::new); buildIn.put("BM25", BM25SimilarityProvider::new); buildIn.put("DFR", DFRSimilarityProvider::new); buildIn.put("IB", IBSimilarityProvider::new); diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index 0f29ed5a2f7..725b1bd4400 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; import org.apache.lucene.util.TestUtil; @@ -214,7 +214,7 @@ public class BlendedTermQueryTests extends ESTestCase { } public IndexSearcher setSimilarity(IndexSearcher searcher) { - Similarity similarity = random().nextBoolean() ? new BM25Similarity() : new DefaultSimilarity(); + Similarity similarity = random().nextBoolean() ? 
new BM25Similarity() : new ClassicSimilarity(); searcher.setSimilarity(similarity); return searcher; } diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index 5b2a368661a..dd5ca6bcc51 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.similarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BasicModelG; import org.apache.lucene.search.similarities.DFRSimilarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.DistributionSPL; import org.apache.lucene.search.similarities.IBSimilarity; import org.apache.lucene.search.similarities.LMDirichletSimilarity; @@ -31,11 +31,16 @@ import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.LambdaTTF; import org.apache.lucene.search.similarities.NormalizationH2; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; @@ -45,42 +50,43 @@ import static org.hamcrest.CoreMatchers.instanceOf; public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveDefaultSimilarities() { SimilarityService similarityService = createIndex("foo").similarityService(); - assertThat(similarityService.getSimilarity("default").get(), instanceOf(DefaultSimilarity.class)); + assertThat(similarityService.getSimilarity("classic").get(), instanceOf(ClassicSimilarity.class)); assertThat(similarityService.getSimilarity("BM25").get(), instanceOf(BM25Similarity.class)); + assertThat(similarityService.getSimilarity("default"), equalTo(null)); } - public void testResolveSimilaritiesFromMapping_default() throws IOException { + public void testResolveSimilaritiesFromMapping_classic() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "default") - .put("index.similarity.my_similarity.discount_overlaps", false) - .build(); + .put("index.similarity.my_similarity.type", "classic") + .put("index.similarity.my_similarity.discount_overlaps", false) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = 
indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DefaultSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(ClassicSimilarityProvider.class)); - DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + ClassicSimilarity similarity = (ClassicSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } public void testResolveSimilaritiesFromMapping_bm25() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "BM25") - .put("index.similarity.my_similarity.k1", 2.0f) - .put("index.similarity.my_similarity.b", 1.5f) - .put("index.similarity.my_similarity.discount_overlaps", false) - .build(); + .put("index.similarity.my_similarity.type", "BM25") + .put("index.similarity.my_similarity.k1", 2.0f) + .put("index.similarity.my_similarity.b", 1.5f) + .put("index.similarity.my_similarity.discount_overlaps", false) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(BM25SimilarityProvider.class)); @@ -93,18 +99,18 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_DFR() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "DFR") - .put("index.similarity.my_similarity.basic_model", "g") - .put("index.similarity.my_similarity.after_effect", "l") - .put("index.similarity.my_similarity.normalization", "h2") - .put("index.similarity.my_similarity.normalization.h2.c", 3f) - .build(); + .put("index.similarity.my_similarity.type", "DFR") + .put("index.similarity.my_similarity.basic_model", "g") + .put("index.similarity.my_similarity.after_effect", "l") + .put("index.similarity.my_similarity.normalization", "h2") + .put("index.similarity.my_similarity.normalization.h2.c", 3f) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); 
assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DFRSimilarityProvider.class)); @@ -118,18 +124,18 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_IB() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "IB") - .put("index.similarity.my_similarity.distribution", "spl") - .put("index.similarity.my_similarity.lambda", "ttf") - .put("index.similarity.my_similarity.normalization", "h2") - .put("index.similarity.my_similarity.normalization.h2.c", 3f) - .build(); + .put("index.similarity.my_similarity.type", "IB") + .put("index.similarity.my_similarity.distribution", "spl") + .put("index.similarity.my_similarity.lambda", "ttf") + .put("index.similarity.my_similarity.normalization", "h2") + .put("index.similarity.my_similarity.normalization.h2.c", 3f) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(IBSimilarityProvider.class)); @@ -143,15 +149,15 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "LMDirichlet") - .put("index.similarity.my_similarity.mu", 3000f) - .build(); + .put("index.similarity.my_similarity.type", "LMDirichlet") + .put("index.similarity.my_similarity.mu", 3000f) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMDirichletSimilarityProvider.class)); @@ -162,15 +168,15 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings 
indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "LMJelinekMercer") - .put("index.similarity.my_similarity.lambda", 0.7f) - .build(); + .put("index.similarity.my_similarity.type", "LMJelinekMercer") + .put("index.similarity.my_similarity.lambda", 0.7f) + .build(); IndexService indexService = createIndex("foo", indexSettings); DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); @@ -178,4 +184,47 @@ public class SimilarityTests extends ESSingleNodeTestCase { LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } + + public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "unknown_similarity").endObject() + .endObject() + .endObject().endObject().string(); + + IndexService indexService = createIndex("foo"); + try { + indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + fail("Expected MappingParsingException"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for [field1]")); + } + } + + public void testSimilarityDefaultBackCompat() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1") + .field("similarity", "default") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0)) + .build(); + + DocumentMapperParser parser = createIndex("test_v2.x", settings).mapperService().documentMapperParser(); + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(ClassicSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().name(), equalTo("classic")); + + parser = createIndex("test_v3.x").mapperService().documentMapperParser(); + try { + parser.parse("type", new CompressedXContent(mapping)); + fail("Expected MappingParsingException"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("Unknown Similarity type [default] for [field1]")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java index ab6a10f3cd2..8912956489f 100644 --- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class SimilarityIT extends ESIntegTestCase { +public class SimilarityIT extends ESIntegTestCase { public void testCustomBM25Similarity() throws Exception { try 
{ client().admin().indices().prepareDelete("test").execute().actionGet(); @@ -45,7 +45,7 @@ public class SimilarityIT extends ESIntegTestCase { .field("type", "string") .endObject() .startObject("field2") - .field("similarity", "default") + .field("similarity", "classic") .field("type", "string") .endObject() .endObject() diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index ddec26b8030..df37e7876c1 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -48,10 +48,10 @@ Here we configure the DFRSimilarity so it can be referenced as === Available similarities [float] -[[default-similarity]] -==== Default similarity +[[classic-similarity]] +==== Classic similarity -The default similarity that is based on the TF/IDF model. This +The classic similarity that is based on the TF/IDF model. This similarity has the following option: `discount_overlaps`:: @@ -59,7 +59,7 @@ similarity has the following option: 0 position increment) are ignored when computing norm. By default this is true, meaning overlap tokens do not count when computing norms. -Type name: `default` +Type name: `classic` [float] [[bm25]] diff --git a/docs/reference/mapping/params/similarity.asciidoc b/docs/reference/mapping/params/similarity.asciidoc index 393f654bcf1..a3fdef1d43b 100644 --- a/docs/reference/mapping/params/similarity.asciidoc +++ b/docs/reference/mapping/params/similarity.asciidoc @@ -15,7 +15,7 @@ similarities. For more details about this expert options, see the The only similarities which can be used out of the box, without any further configuration are: -`default`:: +`classic`:: The Default TF/IDF algorithm used by Elasticsearch and Lucene. See {defguide}/practical-scoring-function.html[Lucene’s Practical Scoring Function] for more information. @@ -49,6 +49,6 @@ PUT my_index } -------------------------------------------------- // AUTOSENSE -<1> The `default_field` uses the `default` similarity (ie TF/IDF). +<1> The `default_field` uses the `classic` similarity (ie TF/IDF). <2> The `bm25_field` uses the `BM25` similarity. diff --git a/docs/reference/mapping/types/string.asciidoc b/docs/reference/mapping/types/string.asciidoc index 95c682c696f..557f77d9b38 100644 --- a/docs/reference/mapping/types/string.asciidoc +++ b/docs/reference/mapping/types/string.asciidoc @@ -166,7 +166,7 @@ Defaults depend on the <> setting: <>:: Which scoring algorithm or _similarity_ should be used. Defaults - to `default`, which uses TF/IDF. + to `classic`, which uses TF/IDF. <>:: diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 83107cbfa9f..88d77a5206a 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -205,6 +205,10 @@ cluster settings please use the settings update API and set their superseded key The `index.translog.flush_threshold_ops` setting is not supported anymore. In order to control flushes based on the transaction log growth use `index.translog.flush_threshold_size` instead. +==== Similarity settings + +The 'default' similarity has been renamed to 'classic'. 
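A minimal sketch of what the rename means for index settings, using the same keys exercised in SimilarityTests in this patch (the "my_similarity" name is illustrative):

import org.elasticsearch.common.settings.Settings;

// A custom similarity must now declare type "classic"; only indices created
// before 3.0 still resolve the removed "default" name for back compatibility.
Settings indexSettings = Settings.settingsBuilder()
        .put("index.similarity.my_similarity.type", "classic") // was "default"
        .put("index.similarity.my_similarity.discount_overlaps", false)
        .build();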
+ [[breaking_30_mapping_changes]] === Mapping changes From fcfd98e9e89231d748ae66c81791b0b08b0c6200 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 16:44:35 +0100 Subject: [PATCH 194/322] Drop support for simple translog and hard-wire buffer to 8kb Today we have two variants of translogs for indexing. We only recommend the buffered one, which also has a 20% advantage in indexing speed. This commit removes the option and defaults to the buffered case. It also hard-wires the translog buffer to 8kb instead of 64kb. We used to adjust that buffer based on whether the shard is active or not; this code has also been removed and instead we just keep an 8kb buffer around. --- .../elasticsearch/cluster/ClusterModule.java | 1 - .../elasticsearch/index/shard/IndexShard.java | 12 +- .../translog/BufferingTranslogWriter.java | 177 ----------------- .../index/translog/Translog.java | 9 +- .../index/translog/TranslogConfig.java | 51 ++--- .../index/translog/TranslogWriter.java | 181 +++++++++++------- .../memory/IndexingMemoryController.java | 59 +----- .../index/translog/BufferedTranslogTests.java | 44 ----- .../index/translog/TranslogTests.java | 18 +- .../memory/IndexingMemoryControllerTests.java | 53 ++--- .../reference/index-modules/translog.asciidoc | 23 +-- docs/reference/migration/migrate_3_0.asciidoc | 3 +- .../elasticsearch/test/ESIntegTestCase.java | 1 - 13 files changed, 165 insertions(+), 467 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java delete mode 100644 core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 6f7eb20874f..6a4831a2eee 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -140,7 +140,6 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY); registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY); - registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, Validator.EMPTY); registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER); registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY); registerIndexDynamicSetting(IndexMetaData.SETTING_READ_ONLY, Validator.EMPTY); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index b0d19a2c0b2..fd8490ae8e0 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1016,7 +1016,7 @@ public class IndexShard extends AbstractIndexShardComponent { * Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than * the new buffering indexing size then we do a refresh to free up the heap.
*/ - public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { + public void updateBufferSize(ByteSizeValue shardIndexingBufferSize) { final EngineConfig config = engineConfig; final ByteSizeValue preValue = config.getIndexingBufferSize(); @@ -1054,8 +1054,6 @@ public class IndexShard extends AbstractIndexShardComponent { logger.debug(message); } } - - engine.getTranslog().updateBuffer(shardTranslogBufferSize); } /** @@ -1072,7 +1070,7 @@ public class IndexShard extends AbstractIndexShardComponent { if (engineOrNull != null && System.nanoTime() - engineOrNull.getLastWriteNanos() >= inactiveTimeNS) { boolean wasActive = active.getAndSet(false); if (wasActive) { - updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER); + updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER); logger.debug("marking shard as inactive (inactive_time=[{}]) indexing wise", inactiveTime); indexEventListener.onShardInactive(this); } @@ -1161,12 +1159,6 @@ public class IndexShard extends AbstractIndexShardComponent { this.flushOnClose = flushOnClose; } - TranslogWriter.Type type = TranslogWriter.Type.fromString(settings.get(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, translogConfig.getType().name())); - if (type != translogConfig.getType()) { - logger.info("updating type from [{}] to [{}]", translogConfig.getType(), type); - translogConfig.setType(type); - } - final Translog.Durabilty durabilty = getFromSettings(logger, settings, translogConfig.getDurabilty()); if (durabilty != translogConfig.getDurabilty()) { logger.info("updating durability from [{}] to [{}]", translogConfig.getDurabilty(), durabilty); diff --git a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java deleted file mode 100644 index a2eb0bff646..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.translog; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Channels; -import org.elasticsearch.common.util.concurrent.ReleasableLock; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; - -/** - */ -public final class BufferingTranslogWriter extends TranslogWriter { - private byte[] buffer; - private int bufferCount; - private WrapperOutputStream bufferOs = new WrapperOutputStream(); - - /* the total offset of this file including the bytes written to the file as well as into the buffer */ - private volatile long totalOffset; - - public BufferingTranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - super(shardId, generation, channelReference); - this.buffer = new byte[bufferSize]; - this.totalOffset = writtenOffset; - } - - @Override - public Translog.Location add(BytesReference data) throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - final long offset = totalOffset; - if (data.length() >= buffer.length) { - flush(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - try { - data.writeTo(channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += data.length(); - totalOffset += data.length(); - } else { - if (data.length() > buffer.length - bufferCount) { - flush(); - } - data.writeTo(bufferOs); - totalOffset += data.length(); - } - operationCounter++; - return new Translog.Location(generation, offset, data.length()); - } - } - - protected final void flush() throws IOException { - assert writeLock.isHeldByCurrentThread(); - if (bufferCount > 0) { - ensureOpen(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - final int bufferSize = bufferCount; - try { - Channels.writeToChannel(buffer, 0, bufferSize, channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += bufferSize; - bufferCount = 0; - } - } - - @Override - protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - try (ReleasableLock lock = readLock.acquire()) { - if (position >= writtenOffset) { - assert targetBuffer.hasArray() : "buffer must have array"; - final int sourcePosition = (int) (position - writtenOffset); - System.arraycopy(buffer, sourcePosition, - targetBuffer.array(), targetBuffer.position(), targetBuffer.limit()); - targetBuffer.position(targetBuffer.limit()); - return; - } - } - // we don't have to have a read lock here because we only write ahead to the file, so all writes has been complete - // for the requested location. 
- Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); - } - - @Override - public boolean syncNeeded() { - return totalOffset != lastSyncedOffset; - } - - @Override - public synchronized void sync() throws IOException { - if (syncNeeded()) { - ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event - channelReference.incRef(); - try { - final long offsetToSync; - final int opsCounter; - try (ReleasableLock lock = writeLock.acquire()) { - flush(); - offsetToSync = totalOffset; - opsCounter = operationCounter; - } - // we can do this outside of the write lock but we have to protect from - // concurrent syncs - ensureOpen(); // just for kicks - the checkpoint happens or not either way - try { - checkpoint(offsetToSync, opsCounter, channelReference); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - lastSyncedOffset = offsetToSync; - } finally { - channelReference.decRef(); - } - } - } - - - public void updateBufferSize(int bufferSize) { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - if (this.buffer.length != bufferSize) { - flush(); - this.buffer = new byte[bufferSize]; - } - } catch (IOException e) { - throw new TranslogException(shardId, "failed to flush", e); - } - } - - class WrapperOutputStream extends OutputStream { - - @Override - public void write(int b) throws IOException { - buffer[bufferCount++] = (byte) b; - } - - @Override - public void write(byte[] b, int off, int len) throws IOException { - // we do safety checked when we decide to use this stream... - System.arraycopy(b, off, buffer, bufferCount, len); - bufferCount += len; - } - } - - @Override - public long sizeInBytes() { - return totalOffset; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 17c7f753137..7da54ed8a37 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -280,13 +280,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC throw new IllegalArgumentException("can't parse id from file: " + fileName); } - public void updateBuffer(ByteSizeValue bufferSize) { - config.setBufferSize(bufferSize.bytesAsInt()); - try (ReleasableLock lock = writeLock.acquire()) { - current.updateBufferSize(config.getBufferSize()); - } - } - /** Returns {@code true} if this {@code Translog} is still open. 
*/ public boolean isOpen() { return closed.get() == false; @@ -367,7 +360,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC TranslogWriter createWriter(long fileGeneration) throws IOException { TranslogWriter newFile; try { - newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSize(), getChannelFactory()); + newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), getChannelFactory(), config.getBufferSize()); } catch (IOException e) { throw new TranslogException(shardId, "failed to create new translog file", e); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java index ca479bec080..442792f3132 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java @@ -20,13 +20,13 @@ package org.elasticsearch.index.translog; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog.TranslogGeneration; -import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; import java.nio.file.Path; @@ -39,21 +39,19 @@ import java.nio.file.Path; public final class TranslogConfig { public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; - public static final String INDEX_TRANSLOG_FS_TYPE = "index.translog.fs.type"; - public static final String INDEX_TRANSLOG_BUFFER_SIZE = "index.translog.fs.buffer_size"; public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; + public static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(8, ByteSizeUnit.KB); private final TimeValue syncInterval; private final BigArrays bigArrays; private final ThreadPool threadPool; private final boolean syncOnEachOperation; - private volatile int bufferSize; private volatile TranslogGeneration translogGeneration; private volatile Translog.Durabilty durabilty = Translog.Durabilty.REQUEST; - private volatile TranslogWriter.Type type; private final IndexSettings indexSettings; private final ShardId shardId; private final Path translogPath; + private final ByteSizeValue bufferSize; /** * Creates a new TranslogConfig instance @@ -65,14 +63,17 @@ public final class TranslogConfig { * @param threadPool a {@link ThreadPool} to schedule async sync durability */ public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool) { + this(shardId, translogPath, indexSettings, durabilty, bigArrays, threadPool, DEFAULT_BUFFER_SIZE); + } + + TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool, ByteSizeValue bufferSize) { + this.bufferSize = bufferSize; this.indexSettings = indexSettings; this.shardId = shardId; this.translogPath = translogPath; this.durabilty 
= durabilty; this.threadPool = threadPool; this.bigArrays = bigArrays; - this.type = TranslogWriter.Type.fromString(indexSettings.getSettings().get(INDEX_TRANSLOG_FS_TYPE, TranslogWriter.Type.BUFFERED.name())); - this.bufferSize = (int) indexSettings.getSettings().getAsBytesSize(INDEX_TRANSLOG_BUFFER_SIZE, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER).bytes(); // Not really interesting, updated by IndexingMemoryController... syncInterval = indexSettings.getSettings().getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); if (syncInterval.millis() > 0 && threadPool != null) { @@ -84,6 +85,7 @@ public final class TranslogConfig { } } + /** * Returns a {@link ThreadPool} to schedule async durability operations */ @@ -105,20 +107,6 @@ public final class TranslogConfig { this.durabilty = durabilty; } - /** - * Returns the translog type - */ - public TranslogWriter.Type getType() { - return type; - } - - /** - * Sets the TranslogType for this Translog. The change will affect all subsequent translog files. - */ - public void setType(TranslogWriter.Type type) { - this.type = type; - } - /** * Returns true iff each low level operation shoudl be fsynced */ @@ -126,20 +114,6 @@ public final class TranslogConfig { return syncOnEachOperation; } - /** - * Retruns the current translog buffer size. - */ - public int getBufferSize() { - return bufferSize; - } - - /** - * Sets the current buffer size - for setting a live setting use {@link Translog#updateBuffer(ByteSizeValue)} - */ - public void setBufferSize(int bufferSize) { - this.bufferSize = bufferSize; - } - /** * Returns the current async fsync interval */ @@ -192,4 +166,11 @@ public final class TranslogConfig { public void setTranslogGeneration(TranslogGeneration translogGeneration) { this.translogGeneration = translogGeneration; } + + /** + * The translog buffer size. Default is 8kb + */ + public ByteSizeValue getBufferSize() { + return bufferSize; + } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 975d722b085..49392088692 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -28,11 +28,14 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.index.shard.ShardId; +import java.io.BufferedOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Files; @@ -49,30 +52,39 @@ public class TranslogWriter extends TranslogReader { public static final int VERSION_CHECKPOINTS = 2; // since 2.0 we have checkpoints? 
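The TranslogConfig changes above replace two mutable knobs (the writer type and the buffer size) with a single immutable buffer size that defaults to 8kb; only a package-private constructor can override it. A minimal, self-contained sketch of that construction pattern (the class and field names here are illustrative, not the ones in this patch):

    public final class FixedBufferConfig {
        static final int DEFAULT_BUFFER_SIZE_BYTES = 8 * 1024; // mirrors TranslogConfig.DEFAULT_BUFFER_SIZE

        private final int bufferSizeBytes; // immutable: no setter, unlike the old volatile field

        public FixedBufferConfig() {
            this(DEFAULT_BUFFER_SIZE_BYTES); // production callers always get the default
        }

        FixedBufferConfig(int bufferSizeBytes) { // package-private overload, reachable from tests only
            this.bufferSizeBytes = bufferSizeBytes;
        }

        public int getBufferSizeBytes() {
            return bufferSizeBytes;
        }
    }

Keeping the field final removes the need for the old setBufferSize/setType plumbing that IndexingMemoryController used to drive at runtime.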
public static final int VERSION = VERSION_CHECKPOINTS; - protected final ShardId shardId; - protected final ReleasableLock readLock; - protected final ReleasableLock writeLock; + private final ShardId shardId; + private final ReleasableLock readLock; + private final ReleasableLock writeLock; /* the offset in bytes that was written when the file was last synced*/ - protected volatile long lastSyncedOffset; + private volatile long lastSyncedOffset; /* the number of translog operations written to this file */ - protected volatile int operationCounter; + private volatile int operationCounter; /* the offset in bytes written to the file */ - protected volatile long writtenOffset; + private volatile long writtenOffset; /* if we hit an exception that we can't recover from we assign it to this var and ship it with every AlreadyClosedException we throw */ private volatile Throwable tragedy; + private final byte[] buffer; + private int bufferCount; + private WrapperOutputStream bufferOs = new WrapperOutputStream(); - public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference) throws IOException { + /* the total offset of this file including the bytes written to the file as well as into the buffer */ + private volatile long totalOffset; + + + public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, ByteSizeValue bufferSize) throws IOException { super(generation, channelReference, channelReference.getChannel().position()); this.shardId = shardId; ReadWriteLock rwl = new ReentrantReadWriteLock(); readLock = new ReleasableLock(rwl.readLock()); writeLock = new ReleasableLock(rwl.writeLock()); this.writtenOffset = channelReference.getChannel().position(); - this.lastSyncedOffset = channelReference.getChannel().position();; + this.totalOffset = writtenOffset; + this.buffer = new byte[bufferSize.bytesAsInt()]; + this.lastSyncedOffset = channelReference.getChannel().position(); } - public static TranslogWriter create(Type type, ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback onClose, int bufferSize, ChannelFactory channelFactory) throws IOException { + public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final int headerLength = CodecUtil.headerLength(TRANSLOG_CODEC) + ref.length + RamUsageEstimator.NUM_BYTES_INT; final FileChannel channel = channelFactory.open(file); @@ -85,7 +97,7 @@ public class TranslogWriter extends TranslogReader { out.writeBytes(ref.bytes, ref.offset, ref.length); channel.force(false); writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE); - final TranslogWriter writer = type.create(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); + final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); return writer; } catch (Throwable throwable){ IOUtils.closeWhileHandlingException(channel); @@ -104,34 +116,7 @@ public class TranslogWriter extends TranslogReader { return tragedy; } - public enum Type { - - SIMPLE() { - @Override - public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - return new TranslogWriter(shardId, generation, channelReference); - 
} - }, - BUFFERED() { - @Override - public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - return new BufferingTranslogWriter(shardId, generation, channelReference, bufferSize); - } - }; - - public abstract TranslogWriter create(ShardId shardId, long generation, ChannelReference raf, int bufferSize) throws IOException; - - public static Type fromString(String type) { - if (SIMPLE.name().equalsIgnoreCase(type)) { - return SIMPLE; - } else if (BUFFERED.name().equalsIgnoreCase(type)) { - return BUFFERED; - } - throw new IllegalArgumentException("No translog fs type [" + type + "]"); - } - } - - protected final void closeWithTragicEvent(Throwable throwable) throws IOException { + private final void closeWithTragicEvent(Throwable throwable) throws IOException { try (ReleasableLock lock = writeLock.acquire()) { if (tragedy == null) { tragedy = throwable; @@ -146,38 +131,60 @@ public class TranslogWriter extends TranslogReader { * add the given bytes to the translog and return the location they were written at */ public Translog.Location add(BytesReference data) throws IOException { - final long position; try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); - position = writtenOffset; - try { - data.writeTo(channel); - } catch (Throwable e) { - closeWithTragicEvent(e); - throw e; + final long offset = totalOffset; + if (data.length() >= buffer.length) { + flush(); + // we use the channel to write, since on windows, writing to the RAF might not be reflected + // when reading through the channel + try { + data.writeTo(channel); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + writtenOffset += data.length(); + totalOffset += data.length(); + } else { + if (data.length() > buffer.length - bufferCount) { + flush(); + } + data.writeTo(bufferOs); + totalOffset += data.length(); } - writtenOffset = writtenOffset + data.length(); - operationCounter++;; + operationCounter++; + return new Translog.Location(generation, offset, data.length()); } - return new Translog.Location(generation, position, data.length()); - } - - /** - * change the size of the internal buffer if relevant - */ - public void updateBufferSize(int bufferSize) throws TranslogException { } /** * write all buffered ops to disk and fsync file */ - public synchronized void sync() throws IOException { // synchronized to ensure only one sync happens a time - // check if we really need to sync here... 
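The new add(BytesReference) above folds the old BufferingTranslogWriter behaviour into TranslogWriter itself: a payload at least as large as the whole buffer is written through to the channel after a flush, while a smaller payload is appended to the array buffer, flushing first when it would not fit. A stand-alone sketch of that policy, under the simplifying assumptions of byte[] payloads and an OutputStream standing in for the FileChannel (names are illustrative):

    import java.io.IOException;
    import java.io.OutputStream;

    final class BufferedAppender {
        private final byte[] buffer;
        private int bufferCount;        // bytes currently sitting in the buffer
        private long writtenOffset;     // bytes already flushed to the underlying stream
        private long totalOffset;       // writtenOffset + bufferCount
        private final OutputStream out; // stands in for the FileChannel used by the real writer

        BufferedAppender(int bufferSize, OutputStream out) {
            this.buffer = new byte[bufferSize];
            this.out = out;
        }

        /** Appends data and returns the logical offset it was written at. */
        synchronized long add(byte[] data) throws IOException {
            final long offset = totalOffset;
            if (data.length >= buffer.length) {
                flush();                      // write-through: payload larger than the whole buffer
                out.write(data);
                writtenOffset += data.length;
            } else {
                if (data.length > buffer.length - bufferCount) {
                    flush();                  // not enough room left, drain the buffer first
                }
                System.arraycopy(data, 0, buffer, bufferCount, data.length);
                bufferCount += data.length;
            }
            totalOffset += data.length;
            return offset;
        }

        synchronized void flush() throws IOException {
            if (bufferCount > 0) {
                out.write(buffer, 0, bufferCount);
                writtenOffset += bufferCount;
                bufferCount = 0;
            }
        }
    }

Note that the returned Location records totalOffset rather than writtenOffset, since the payload may still be sitting in the buffer rather than in the file.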
+ public synchronized void sync() throws IOException { if (syncNeeded()) { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - checkpoint(writtenOffset, operationCounter, channelReference); - lastSyncedOffset = writtenOffset; + ensureOpen(); // this call gives a better exception than the incRef if we are closed by a tragic event + channelReference.incRef(); + try { + final long offsetToSync; + final int opsCounter; + try (ReleasableLock lock = writeLock.acquire()) { + flush(); + offsetToSync = totalOffset; + opsCounter = operationCounter; + } + // we can do this outside of the write lock but we have to protect from + // concurrent syncs + ensureOpen(); // just for kicks - the checkpoint happens or not either way + try { + checkpoint(offsetToSync, opsCounter, channelReference); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + lastSyncedOffset = offsetToSync; + } finally { + channelReference.decRef(); } } } @@ -185,9 +192,7 @@ public class TranslogWriter extends TranslogReader { /** * returns true if there are buffered ops */ - public boolean syncNeeded() { - return writtenOffset != lastSyncedOffset; // by default nothing is buffered - } + public boolean syncNeeded() { return totalOffset != lastSyncedOffset; } @Override public int totalOperations() { @@ -196,14 +201,29 @@ @Override public long sizeInBytes() { - return writtenOffset; + return totalOffset; } /** * Flushes the buffer if the translog is buffered. */ - protected void flush() throws IOException { + private final void flush() throws IOException { + assert writeLock.isHeldByCurrentThread(); + if (bufferCount > 0) { + ensureOpen(); + // we use the channel to write, since on windows, writing to the RAF might not be reflected + // when reading through the channel + final int bufferSize = bufferCount; + try { + Channels.writeToChannel(buffer, 0, bufferSize, channel); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + writtenOffset += bufferSize; + bufferCount = 0; + } } /** @@ -292,13 +312,23 @@ } @Override - protected void readBytes(ByteBuffer buffer, long position) throws IOException { + protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { try (ReleasableLock lock = readLock.acquire()) { - Channels.readFromFileChannelWithEofException(channel, position, buffer); + if (position >= writtenOffset) { + assert targetBuffer.hasArray() : "buffer must have array"; + final int sourcePosition = (int) (position - writtenOffset); + System.arraycopy(buffer, sourcePosition, + targetBuffer.array(), targetBuffer.position(), targetBuffer.limit()); + targetBuffer.position(targetBuffer.limit()); + return; + } }
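The reworked sync() above does the cheap work (flushing the buffer and snapshotting the offset and op count) under the write lock, and the expensive fsync/checkpoint outside it, relying on the method-level synchronized for mutual exclusion between syncs and on the channel refcount to keep the file open. A simplified sketch of just that locking shape, with no real I/O and illustrative names:

    import java.util.concurrent.locks.ReentrantLock;

    final class SyncSketch {
        private final ReentrantLock writeLock = new ReentrantLock();
        private long totalOffset;               // grows under the write lock as operations are added
        private volatile long lastSyncedOffset; // everything below this offset is durable

        void add(int bytes) {
            writeLock.lock();
            try {
                totalOffset += bytes; // writers advance the offset under the write lock
            } finally {
                writeLock.unlock();
            }
        }

        boolean syncNeeded() {
            return totalOffset != lastSyncedOffset;
        }

        synchronized void sync() { // synchronized: only one checkpoint in flight at a time
            if (syncNeeded()) {
                final long offsetToSync;
                writeLock.lock();
                try {
                    // the buffer flush happens here, under the write lock
                    offsetToSync = totalOffset;
                } finally {
                    writeLock.unlock();
                }
                // the expensive fsync + checkpoint write happens here, outside the write lock,
                // so concurrent writers keep making progress while we hit the disk
                lastSyncedOffset = offsetToSync;
            }
        }
    }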
+ // we don't have to have a read lock here because we only write ahead to the file, so all writes have been completed + // for the requested location. + Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - protected synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException { + private synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException { channelReference.getChannel().force(false); writeCheckpoint(lastSyncPosition, operationCounter, channelReference.getPath().getParent(), channelReference.getGeneration(), StandardOpenOption.WRITE); } @@ -324,4 +354,19 @@ throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed", tragedy); } } + + class WrapperOutputStream extends OutputStream { + + @Override + public void write(int b) throws IOException { + buffer[bufferCount++] = (byte) b; + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + // we do safety checks when we decide to use this stream... + System.arraycopy(b, off, buffer, bufferCount, len); + bufferCount += len; + } + } } diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index 72b951cadd5..a72c115835c 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -58,15 +58,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<IndexingMemoryController> - /** Only applies when indices.memory.translog_buffer_size is a %, to set a floor on the actual size in bytes (default: 256 KB). */ - public static final String MIN_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_translog_buffer_size"; - - /** Only applies when indices.memory.translog_buffer_size is a %, to set a ceiling on the actual size in bytes (default: not set). */ - public static final String MAX_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.max_translog_buffer_size"; - /** Sets a floor on the per-shard translog buffer size (default: 2 KB).
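WrapperOutputStream above is a deliberately unchecked OutputStream view over the writer's byte[], so that BytesReference.writeTo(...) can append into the translog buffer without an intermediate copy; add() only routes data through it after verifying the payload fits. A self-contained sketch of the same idea (illustrative class name, with the caller contract spelled out in comments):

    import java.io.IOException;
    import java.io.OutputStream;

    final class ByteArraySink extends OutputStream {
        private final byte[] buffer;
        private int count;

        ByteArraySink(byte[] buffer) {
            this.buffer = buffer;
        }

        @Override
        public void write(int b) throws IOException {
            buffer[count++] = (byte) b; // caller guarantees remaining capacity
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
            System.arraycopy(b, off, buffer, count, len); // caller guarantees remaining capacity
            count += len;
        }

        int size() {
            return count;
        }
    }

Skipping the bounds checks is safe only because the capacity test already happened in add(); that is the design choice the "safety checks" comment in the hunk is pointing at.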
*/ public static final String MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_shard_translog_buffer_size"; @@ -88,11 +79,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent maxTranslogBuffer.bytes()) { - translogBuffer = maxTranslogBuffer; - } - } else { - translogBuffer = ByteSizeValue.parseBytesSizeValue(translogBufferSetting, TRANSLOG_BUFFER_SIZE_SETTING); - } - this.translogBuffer = translogBuffer; - this.minShardTranslogBufferSize = this.settings.getAsBytesSize(MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(2, ByteSizeUnit.KB)); - this.maxShardTranslogBufferSize = this.settings.getAsBytesSize(MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(64, ByteSizeUnit.KB)); - // we need to have this relatively small to move a shard from inactive to active fast (enough) this.interval = this.settings.getAsTime(SHARD_INACTIVE_INTERVAL_TIME_SETTING, TimeValue.timeValueSeconds(30)); @@ -192,14 +157,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent availableShards() { List availableShards = new ArrayList<>(); @@ -220,9 +177,9 @@ public class IndexingMemoryController extends AbstractLifecycleComponent maxShardTranslogBufferSize.bytes()) { - shardTranslogBufferSize = maxShardTranslogBufferSize; - } - - logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize); + logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize); for (IndexShard shard : activeShards) { - updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize); + updateShardBuffers(shard, shardIndexingBufferSize); } } } diff --git a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java deleted file mode 100644 index b021f3252d6..00000000000 --- a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
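With the translog buffer settings gone, IndexingMemoryController only has the indexing buffer left to distribute. Judging from the hunks above and the test expectations below, the budget is split evenly across active shards and clamped by the per-shard floor and ceiling settings; a sketch of that arithmetic (a simplification for illustration, not the controller's exact code):

    final class ShardBufferMath {
        static long perShardBufferBytes(long totalBudgetBytes, int activeShardCount,
                                        long minShardBytes, long maxShardBytes) {
            long share = totalBudgetBytes / Math.max(1, activeShardCount);
            return Math.min(maxShardBytes, Math.max(minShardBytes, share));
        }

        public static void main(String[] args) {
            // 10mb across 2 shards -> 5mb each, matching the expectations in the tests below
            System.out.println(perShardBufferBytes(10L << 20, 2, 2L << 10, 512L << 20));
        }
    }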
- */ - -package org.elasticsearch.index.translog; - -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.test.IndexSettingsModule; - -import java.nio.file.Path; - -/** - * - */ -public class BufferedTranslogTests extends TranslogTests { - - @Override - protected TranslogConfig getTranslogConfig(Path path) { - Settings build = Settings.settingsBuilder() - .put("index.translog.fs.type", TranslogWriter.Type.BUFFERED.name()) - .put("index.translog.fs.buffer_size", 10 + randomInt(128 * 1024), ByteSizeUnit.BYTES) - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 3173f7c5dc9..db9f212b568 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -36,6 +36,8 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -132,12 +134,12 @@ public class TranslogTests extends ESTestCase { return new Translog(getTranslogConfig(path)); } - protected TranslogConfig getTranslogConfig(Path path) { + private TranslogConfig getTranslogConfig(Path path) { Settings build = Settings.settingsBuilder() - .put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, TranslogWriter.Type.SIMPLE.name()) .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .build(); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null); + ByteSizeValue bufferSize = randomBoolean() ? 
TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); + return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null, bufferSize); } protected void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { @@ -1412,12 +1414,10 @@ public class TranslogTests extends ESTestCase { fail.set(true); try { Translog.Location location = translog.add(new Translog.Index("test", "2", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); - if (config.getType() == TranslogWriter.Type.BUFFERED) { // the buffered case will fail on the add if we exceed the buffer or will fail on the flush once we sync - if (randomBoolean()) { - translog.ensureSynced(location); - } else { - translog.sync(); - } + if (randomBoolean()) { + translog.ensureSynced(location); + } else { + translog.sync(); } //TODO once we have a mock FS that can simulate we can also fail on plain sync fail("WTF"); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java index 83c7be0374b..d980c3c598d 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java @@ -45,7 +45,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { final static ByteSizeValue INACTIVE = new ByteSizeValue(-1); final Map indexingBuffers = new HashMap<>(); - final Map translogBuffers = new HashMap<>(); final Map lastIndexTimeNanos = new HashMap<>(); final Set activeShards = new HashSet<>(); @@ -63,17 +62,14 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { public void deleteShard(IndexShard id) { indexingBuffers.remove(id); - translogBuffers.remove(id); } - public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) { + public void assertBuffers(IndexShard id, ByteSizeValue indexing) { assertThat(indexingBuffers.get(id), equalTo(indexing)); - assertThat(translogBuffers.get(id), equalTo(translog)); } public void assertInactive(IndexShard id) { assertThat(indexingBuffers.get(id), equalTo(INACTIVE)); - assertThat(translogBuffers.get(id), equalTo(INACTIVE)); } @Override @@ -92,9 +88,8 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { } @Override - protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { + protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize) { indexingBuffers.put(shard, shardIndexingBufferSize); - translogBuffers.put(shard, shardTranslogBufferSize); } @Override @@ -105,7 +100,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { return true; } else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) { indexingBuffers.put(shard, INACTIVE); - translogBuffers.put(shard, INACTIVE); activeShards.remove(shard); return true; } else { @@ -122,7 +116,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { if (indexingBuffers.containsKey(shard) == false) { // First time we are seeing this shard; start it off with inactive buffers as IndexShard does: indexingBuffers.put(shard, 
IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER); - translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER); } activeShards.add(shard); forceCheck(); @@ -135,22 +128,21 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb").build()); IndexShard shard0 = test.getShard(0); controller.simulateIndexing(shard0); - controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K // add another shard IndexShard shard1 = test.getShard(1); controller.simulateIndexing(shard1); - controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); - controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); + controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB)); + controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB)); // remove first shard controller.deleteShard(shard0); controller.forceCheck(); - controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K // remove second shard controller.deleteShard(shard1); @@ -159,7 +151,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { // add a new one IndexShard shard2 = test.getShard(2); controller.simulateIndexing(shard2); - controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB)); // translog is maxed at 64K } public void testActiveInactive() { @@ -169,7 +161,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb") .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s") .build()); @@ -177,8 +168,8 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { controller.simulateIndexing(shard0); IndexShard shard1 = test.getShard(1); controller.simulateIndexing(shard1); - controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); - controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); + controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB)); + controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB)); // index into both shards, move the clock and see that they are still active controller.simulateIndexing(shard0); @@ -193,12 +184,12 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { // index into one shard only, see it becomes active controller.simulateIndexing(shard0); - 
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB)); controller.assertInactive(shard1); controller.incrementTimeSec(3); // increment but not enough to become inactive controller.forceCheck(); - controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB)); controller.assertInactive(shard1); controller.incrementTimeSec(3); // increment some more @@ -209,13 +200,12 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { // index some and shard becomes immediately active controller.simulateIndexing(shard1); controller.assertInactive(shard0); - controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB)); } public void testMinShardBufferSizes() { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") .put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb") .put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build()); @@ -225,7 +215,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { public void testMaxShardBufferSizes() { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") .put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb") .put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build()); @@ -235,34 +224,26 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { public void testRelativeBufferSizes() { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%") .build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB))); - assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); } public void testMinBufferSizes() { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%") - .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb") - .put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); + .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb").build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB))); - assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); } public void testMaxBufferSizes() { MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%") - .put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb") - .put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); + 
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb").build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB))); - assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); } protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) { @@ -273,7 +254,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { controller.simulateIndexing(shard0); IndexShard shard1 = test.getShard(1); controller.simulateIndexing(shard1); - controller.assertBuffers(shard0, indexBufferSize, translogBufferSize); - controller.assertBuffers(shard1, indexBufferSize, translogBufferSize); + controller.assertBuffers(shard0, indexBufferSize); + controller.assertBuffers(shard1, indexBufferSize); } } diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index b8ada9a55a3..de72bed7ac8 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -70,25 +70,4 @@ update, or bulk request. This setting accepts the following parameters: `fsync` and commit in the background every `sync_interval`. In the event of hardware failure, all acknowledged writes since the last automatic commit will be discarded. --- - -`index.translog.fs.type`:: -+ --- - -Whether to buffer writes to the transaction log in memory or not. This -setting accepts the following parameters: - -`buffered`:: - - (default) Translog writes first go to a 64kB buffer in memory, - and are only written to the disk when the buffer is full, or when an - `fsync` is triggered by a write request or the `sync_interval`. - -`simple`:: - - Translog writes are written to the file system immediately, without - buffering. However, these writes will only be persisted to disk when an - `fsync` and commit is triggered by a write request or the `sync_interval`. - --- +-- \ No newline at end of file diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 83107cbfa9f..8b6775b082c 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -203,7 +203,8 @@ cluster settings please use the settings update API and set their superseded key ==== Translog settings The `index.translog.flush_threshold_ops` setting is not supported anymore. In order to control flushes based on the transaction log -growth use `index.translog.flush_threshold_size` instead. +growth use `index.translog.flush_threshold_size` instead. Changing the translog type with `index.translog.fs.type` is not supported +anymore; the `buffered` implementation is now the only available option and uses a fixed `8kb` buffer.
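In practice the migration note above means index creation and settings updates now carry only the surviving translog keys; with the strict validation introduced earlier in this series, the removed keys are rejected outright. A hypothetical sketch using the setting names from this patch:

    import org.elasticsearch.common.settings.Settings;

    public class TranslogSettingsExample {
        public static void main(String[] args) {
            Settings ok = Settings.settingsBuilder()
                    .put("index.translog.durability", "async") // still supported
                    .put("index.translog.sync_interval", "5s") // still supported
                    .build();

            // No longer supported after this change; a strict-validation update is rejected:
            // .put("index.translog.fs.type", "simple")
            // .put("index.translog.fs.buffer_size", "64kb")
            System.out.println(ok.getAsMap());
        }
    }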
[[breaking_30_mapping_changes]] === Mapping changes diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index f2c7d7c4772..b28842b1976 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -522,7 +522,6 @@ public abstract class ESIntegTestCase extends ESTestCase { } if (random.nextBoolean()) { - builder.put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, RandomPicks.randomFrom(random, TranslogWriter.Type.values())); if (rarely(random)) { builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op } else { From cf52e96c42ee3eaaef9dfd3be5a547de6c33705d Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 21 Dec 2015 17:01:28 +0100 Subject: [PATCH 195/322] Upgrade to lucene-5.5.0-snapshot-1721183. Some files that implement or use the Scorer API had to be changed because of https://issues.apache.org/jira/browse/LUCENE-6919. --- buildSrc/version.properties | 2 +- .../elasticsearch/common/lucene/Lucene.java | 17 ++--- .../common/lucene/all/AllTermQuery.java | 17 ++--- .../lucene/index/FilterableTermsEnum.java | 6 +- .../common/lucene/search/EmptyScorer.java | 67 ------------------- .../function/CustomBoostFactorScorer.java | 48 +++++++------ .../function/FiltersFunctionScoreQuery.java | 2 +- .../search/function/ScriptScoreFunction.java | 13 +--- .../index/cache/bitset/BitsetFilterCache.java | 7 +- .../index/fielddata/IndexFieldData.java | 4 +- .../index/mapper/DocumentMapper.java | 8 +-- .../search/geo/GeoDistanceRangeQuery.java | 11 +-- .../bucket/BestDocsDeferringCollector.java | 13 +--- .../children/ParentToChildrenAggregator.java | 5 +- .../bucket/nested/NestedAggregator.java | 8 ++- .../search/fetch/FetchPhase.java | 6 +- .../fetch/innerhits/InnerHitsContext.java | 5 +- .../MatchedQueriesFetchSubPhase.java | 4 +- .../search/profile/ProfileScorer.java | 66 ++++++++++-------- .../elasticsearch/bootstrap/security.policy | 2 +- .../bootstrap/test-framework.policy | 2 +- .../innerhits/NestedChildrenFilterTests.java | 2 +- ...ers-common-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ers-common-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ard-codecs-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ard-codecs-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ucene-core-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ucene-core-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...e-grouping-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...e-grouping-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ighlighter-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ighlighter-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ucene-join-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ucene-join-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ene-memory-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ene-memory-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ucene-misc-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ucene-misc-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ne-queries-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ne-queries-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ueryparser-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ueryparser-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ne-sandbox-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ne-sandbox-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...ne-spatial-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ne-spatial-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...-spatial3d-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...-spatial3d-5.5.0-snapshot-1721183.jar.sha1 
| 1 + ...ne-suggest-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...ne-suggest-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...xpressions-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...xpressions-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...lyzers-icu-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...lyzers-icu-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...s-kuromoji-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...s-kuromoji-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...s-phonetic-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...s-phonetic-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...rs-smartcn-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...rs-smartcn-5.5.0-snapshot-1721183.jar.sha1 | 1 + ...rs-stempel-5.5.0-snapshot-1719088.jar.sha1 | 1 - ...rs-stempel-5.5.0-snapshot-1721183.jar.sha1 | 1 + 62 files changed, 146 insertions(+), 209 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java delete mode 100644 distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 
modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index e33383afa23..e073730fe12 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 3.0.0-SNAPSHOT -lucene = 5.5.0-snapshot-1719088 +lucene = 5.5.0-snapshot-1721183 # optional dependencies spatial4j = 0.5 diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 91eb7b86d57..558e92c4fb8 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -284,7 +284,8 @@ public class Lucene { continue; } final Bits liveDocs = context.reader().getLiveDocs(); - for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) { + final DocIdSetIterator iterator = scorer.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { if (liveDocs == null || liveDocs.get(doc)) { return true; } @@ -667,19 +668,11 @@ public class Lucene { throw new IllegalStateException(message); } @Override - public int advance(int arg0) throws IOException { - throw new IllegalStateException(message); - } - @Override - public long cost() { - throw new IllegalStateException(message); - } - @Override public int docID() { throw new IllegalStateException(message); } @Override - public int nextDoc() throws IOException { + public DocIdSetIterator iterator() { throw new IllegalStateException(message); } }; @@ -757,10 +750,10 @@ public class Lucene { if (scorer == null) { return new Bits.MatchNoBits(maxDoc); } - final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator(); + final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); final DocIdSetIterator iterator; if (twoPhase == null) { - iterator = scorer; + iterator = scorer.iterator(); } else { iterator = twoPhase.approximation(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 4fe90aed9e4..c3ea39a06a2 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.TermState; 
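The Lucene.java hunk above is the canonical shape of the LUCENE-6919 migration that the rest of this commit repeats: a Scorer no longer extends DocIdSetIterator, so nextDoc/advance/cost move to the object returned by scorer.iterator(), while score() and freq() stay on the Scorer. A minimal sketch of a consumption loop written against the new Lucene 5.5 API (the method itself is illustrative):

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;

    final class ScorerIterationExample {
        static int countMatches(Scorer scorer) throws IOException {
            int count = 0;
            final DocIdSetIterator it = scorer.iterator(); // new in Lucene 5.5
            for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                count++; // scorer.score() would still be read from the Scorer itself here
            }
            return count;
        }
    }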
import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -120,7 +121,7 @@ public final class AllTermQuery extends Query { public Explanation explain(LeafReaderContext context, int doc) throws IOException { AllTermScorer scorer = scorer(context); if (scorer != null) { - int newDoc = scorer.advance(doc); + int newDoc = scorer.iterator().advance(doc); if (newDoc == doc) { float score = scorer.score(); float freq = scorer.freq(); @@ -213,18 +214,8 @@ public final class AllTermQuery extends Query { } @Override - public int nextDoc() throws IOException { - return postings.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return postings.advance(target); - } - - @Override - public long cost() { - return postings.cost(); + public DocIdSetIterator iterator() { + return postings; } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 47ed0dbe3f4..0aab078d4ef 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FilteredDocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; @@ -99,11 +100,12 @@ public class FilterableTermsEnum extends TermsEnum { } BitSet bits = null; if (weight != null) { - DocIdSetIterator docs = weight.scorer(context); - if (docs == null) { + Scorer scorer = weight.scorer(context); + if (scorer == null) { // fully filtered, none matching, no need to iterate on this continue; } + DocIdSetIterator docs = scorer.iterator(); // we want to force apply deleted docs final Bits liveDocs = context.reader().getLiveDocs(); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java deleted file mode 100644 index 91627361ece..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
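EmptyScorer can be deleted above because "matches nothing" is now expressed by returning null from Weight.scorer(...) rather than by a dummy iterator. For the same reason, the min-score handling in the CustomBoostFactorScorer hunks just below moves out of Scorer.nextDoc() overrides and into a wrapper around scorer.iterator(); sketched here in isolation (illustrative names, real Lucene 5.5 API):

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;

    final class MinScoreIterator {
        /** Advances to the next doc whose score reaches minScore, or NO_MORE_DOCS. */
        static int nextAtLeast(Scorer scorer, DocIdSetIterator it, float minScore) throws IOException {
            int doc;
            do {
                doc = it.nextDoc();
                if (doc == DocIdSetIterator.NO_MORE_DOCS) {
                    return doc;
                }
            } while (scorer.score() < minScore); // keep skipping low-scoring docs
            return doc;
        }
    }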
- */ -package org.elasticsearch.common.lucene.search; - -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; - -import java.io.IOException; - -/** - * - */ -public class EmptyScorer extends Scorer { - - private int docId = -1; - - public EmptyScorer(Weight weight) { - super(weight); - } - - @Override - public float score() throws IOException { - throw new UnsupportedOperationException("Should never be called"); - } - - @Override - public int freq() throws IOException { - throw new UnsupportedOperationException("Should never be called"); - } - - @Override - public int docID() { - return docId; - } - - @Override - public int nextDoc() throws IOException { - assert docId != NO_MORE_DOCS; - return docId = NO_MORE_DOCS; - } - - @Override - public int advance(int target) throws IOException { - return slowAdvance(target); - } - - @Override - public long cost() { - return 0; - } -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java index 709c7df7898..0e077804974 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.lucene.search.function; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; @@ -27,6 +28,7 @@ import java.io.IOException; abstract class CustomBoostFactorScorer extends Scorer { final Scorer scorer; + final DocIdSetIterator iterator; final float maxBoost; final CombineFunction scoreCombiner; @@ -42,6 +44,7 @@ abstract class CustomBoostFactorScorer extends Scorer { nextDoc = new MinScoreNextDoc(); } this.scorer = scorer; + this.iterator = scorer.iterator(); this.maxBoost = maxBoost; this.scoreCombiner = scoreCombiner; this.minScore = minScore; @@ -53,13 +56,25 @@ abstract class CustomBoostFactorScorer extends Scorer { } @Override - public int advance(int target) throws IOException { - return nextDoc.advance(target); - } - - @Override - public int nextDoc() throws IOException { - return nextDoc.nextDoc(); + public DocIdSetIterator iterator() { + return new DocIdSetIterator() { + @Override + public int nextDoc() throws IOException { + return nextDoc.nextDoc(); + } + @Override + public int advance(int target) throws IOException { + return nextDoc.advance(target); + } + @Override + public long cost() { + return iterator.cost(); + } + @Override + public int docID() { + return iterator.docID(); + } + }; } public abstract float innerScore() throws IOException; @@ -74,11 +89,6 @@ abstract class CustomBoostFactorScorer extends Scorer { return scorer.freq(); } - @Override - public long cost() { - return scorer.cost(); - } - public interface NextDoc { public int advance(int target) throws IOException; @@ -94,8 +104,8 @@ abstract class CustomBoostFactorScorer extends Scorer { public int nextDoc() throws IOException { int doc; do { - doc = scorer.nextDoc(); - if (doc == NO_MORE_DOCS) { + doc = iterator.nextDoc(); + if (doc == DocIdSetIterator.NO_MORE_DOCS) { return doc; } currentScore = innerScore(); @@ -110,13 +120,13 @@ abstract class CustomBoostFactorScorer extends Scorer { @Override public int advance(int target) throws IOException { - int doc = scorer.advance(target); - if (doc == NO_MORE_DOCS) { + int doc = 
iterator.advance(target); + if (doc == DocIdSetIterator.NO_MORE_DOCS) { return doc; } currentScore = innerScore(); if (currentScore < minScore) { - return scorer.nextDoc(); + return iterator.nextDoc(); } return doc; } @@ -126,7 +136,7 @@ abstract class CustomBoostFactorScorer extends Scorer { @Override public int nextDoc() throws IOException { - return scorer.nextDoc(); + return iterator.nextDoc(); } @Override @@ -136,7 +146,7 @@ abstract class CustomBoostFactorScorer extends Scorer { @Override public int advance(int target) throws IOException { - return scorer.advance(target); + return iterator.advance(target); } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 69cf2bcf684..4a6ee941f4f 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -231,7 +231,7 @@ public class FiltersFunctionScoreQuery extends Query { } FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context); - int actualDoc = scorer.advance(doc); + int actualDoc = scorer.iterator.advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, subQueryExpl.getValue()); Explanation factorExplanation = Explanation.match( diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 9013b4b60e0..f027b7c58cb 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Scorer; import org.elasticsearch.script.ExplainableSearchScript; @@ -57,19 +58,9 @@ public class ScriptScoreFunction extends ScoreFunction { } @Override - public int nextDoc() throws IOException { + public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); } - - @Override - public int advance(int target) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public long cost() { - return 1; - } } private final Script sScript; diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index ceac3ca15c5..4e9ecf569d0 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.Accountable; @@ -127,12 +128,12 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L final IndexSearcher searcher = new 
IndexSearcher(topLevelContext); searcher.setQueryCache(null); final Weight weight = searcher.createNormalizedWeight(query, false); - final DocIdSetIterator it = weight.scorer(context); + Scorer s = weight.scorer(context); final BitSet bitSet; - if (it == null) { + if (s == null) { bitSet = null; } else { - bitSet = BitSet.of(it, context.reader().maxDoc()); + bitSet = BitSet.of(s.iterator(), context.reader().maxDoc()); } Value value = new Value(bitSet, shardId); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index 7d2689dc157..fb01a374d2f 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparatorSource; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -139,7 +140,8 @@ public interface IndexFieldData extends IndexCompone * Get a {@link DocIdSet} that matches the inner documents. */ public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException { - return innerFilter.scorer(ctx); + Scorer s = innerFilter.scorer(ctx); + return s == null ? null : s.iterator(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 0c813142dcd..756bd486316 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -20,8 +20,8 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; @@ -297,12 +297,12 @@ public class DocumentMapper implements ToXContent { // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and // therefor is guaranteed to be a live doc. 
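The IndexFieldData change above shows the null-guard idiom this commit applies everywhere, including the DocumentMapper hunk that continues below and the aggregator, fetch, and geo changes after it: check the Scorer for null, which now means the segment has no matches, before asking it for an iterator. As a helper it would look roughly like this (hypothetical method, real Lucene 5.5 API):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    final class ScorerNullGuard {
        static DocIdSetIterator iteratorOrNull(Weight weight, LeafReaderContext ctx) throws IOException {
            final Scorer scorer = weight.scorer(ctx);
            return scorer == null ? null : scorer.iterator(); // null: no matches in this segment
        }
    }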
final Weight nestedWeight = filter.createWeight(sc.searcher(), false); - DocIdSetIterator iterator = nestedWeight.scorer(context); - if (iterator == null) { + Scorer scorer = nestedWeight.scorer(context); + if (scorer == null) { continue; } - if (iterator.advance(nestedDocId) == nestedDocId) { + if (scorer.iterator().advance(nestedDocId) == nestedDocId) { if (nestedObjectMapper == null) { nestedObjectMapper = objectMapper; } else { diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java index f68699ac2a2..ea8ff8ca6d3 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java @@ -144,14 +144,15 @@ public class GeoDistanceRangeQuery extends Query { public Scorer scorer(LeafReaderContext context) throws IOException { final DocIdSetIterator approximation; if (boundingBoxWeight != null) { - approximation = boundingBoxWeight.scorer(context); + Scorer s = boundingBoxWeight.scorer(context); + if (s == null) { + // if the approximation does not match anything, we're done + return null; + } + approximation = s.iterator(); } else { approximation = DocIdSetIterator.all(context.reader().maxDoc()); } - if (approximation == null) { - // if the approximation does not match anything, we're done - return null; - } final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approximation) { @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestDocsDeferringCollector.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestDocsDeferringCollector.java index 22ff6df81f0..0d5bb5ce50d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestDocsDeferringCollector.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestDocsDeferringCollector.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; @@ -277,17 +278,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme } @Override - public int nextDoc() throws IOException { - throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()"); - } - - @Override - public int advance(int target) throws IOException { - throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()"); - } - - @Override - public long cost() { + public DocIdSetIterator iterator() { throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()"); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java index 537a02909a9..63819b978d2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java @@ -134,10 +134,11 @@ public class 
ParentToChildrenAggregator extends SingleBucketAggregator { protected void doPostCollection() throws IOException { IndexReader indexReader = context().searchContext().searcher().getIndexReader(); for (LeafReaderContext ctx : indexReader.leaves()) { - DocIdSetIterator childDocsIter = childFilter.scorer(ctx); - if (childDocsIter == null) { + Scorer childDocsScorer = childFilter.scorer(ctx); + if (childDocsScorer == null) { continue; } + DocIdSetIterator childDocsIter = childDocsScorer.iterator(); final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx); final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 8c227694bf3..fa23cf8dd93 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; @@ -69,7 +70,12 @@ public class NestedAggregator extends SingleBucketAggregator { final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); final Weight weight = searcher.createNormalizedWeight(childFilter, false); - childDocs = weight.scorer(ctx); + Scorer childDocsScorer = weight.scorer(ctx); + if (childDocsScorer == null) { + childDocs = null; + } else { + childDocs = childDocsScorer.iterator(); + } return new LeafBucketCollectorBase(sub, null) { @Override diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 6addf2dc301..5019bab9128 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; @@ -314,11 +315,12 @@ public class FetchPhase implements SearchPhase { continue; } final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false); - DocIdSetIterator childIter = childWeight.scorer(subReaderContext); - if (childIter == null) { + Scorer childScorer = childWeight.scorer(subReaderContext); + if (childScorer == null) { current = nestedParentObjectMapper; continue; } + DocIdSetIterator childIter = childScorer.iterator(); BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index 371e897951e..125563cd098 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ 
b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -221,10 +221,11 @@ public final class InnerHitsContext { return null; } - final DocIdSetIterator childrenIterator = childWeight.scorer(context); - if (childrenIterator == null) { + final Scorer childrenScorer = childWeight.scorer(context); + if (childrenScorer == null) { return null; } + DocIdSetIterator childrenIterator = childrenScorer.iterator(); final DocIdSetIterator it = new DocIdSetIterator() { int doc = -1; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java index de5294f690f..6adb01ad10c 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java @@ -90,9 +90,9 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase { if (scorer == null) { continue; } - final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator(); + final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); if (twoPhase == null) { - if (scorer.advance(hitContext.docId()) == hitContext.docId()) { + if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) { matchedQueries.add(name); } } else { diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java index b0dc6f2cd4e..972d176ca10 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java @@ -49,26 +49,6 @@ final class ProfileScorer extends Scorer { return scorer.docID(); } - @Override - public int advance(int target) throws IOException { - profile.startTime(ProfileBreakdown.TimingType.ADVANCE); - try { - return scorer.advance(target); - } finally { - profile.stopAndRecordTime(); - } - } - - @Override - public int nextDoc() throws IOException { - profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); - try { - return scorer.nextDoc(); - } finally { - profile.stopAndRecordTime(); - } - } - @Override public float score() throws IOException { profile.startTime(ProfileBreakdown.TimingType.SCORE); @@ -84,11 +64,6 @@ final class ProfileScorer extends Scorer { return scorer.freq(); } - @Override - public long cost() { - return scorer.cost(); - } - @Override public Weight getWeight() { return profileWeight; @@ -100,8 +75,45 @@ final class ProfileScorer extends Scorer { } @Override - public TwoPhaseIterator asTwoPhaseIterator() { - final TwoPhaseIterator in = scorer.asTwoPhaseIterator(); + public DocIdSetIterator iterator() { + final DocIdSetIterator in = scorer.iterator(); + return new DocIdSetIterator() { + + @Override + public int advance(int target) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.ADVANCE); + try { + return in.advance(target); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int nextDoc() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); + try { + return in.nextDoc(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public long cost() { + return in.cost(); + } + }; + } + + @Override + public TwoPhaseIterator twoPhaseIterator() { + final TwoPhaseIterator in = 
scorer.twoPhaseIterator(); if (in == null) { return null; } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 26785010110..151c91f5be2 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1719088.jar}" { +grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" { // needed to allow MMapDirectory's "unmap hack" permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index b5f9c24d04f..419c666d55e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1719088.jar}" { +grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java index f00b72bfa8f..60810ee4df6 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java @@ -81,7 +81,7 @@ public class NestedChildrenFilterTests extends ESTestCase { int checkedParents = 0; final Weight parentsWeight = searcher.createNormalizedWeight(new TermQuery(new Term("type", "parent")), false); for (LeafReaderContext leaf : reader.leaves()) { - DocIdSetIterator parents = parentsWeight.scorer(leaf); + DocIdSetIterator parents = parentsWeight.scorer(leaf).iterator(); for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) { int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue(); hitContext.reset(null, leaf, parentDoc, searcher); diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 5d95f64a15f..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f2b9811a4f4a57a1b3a98bdc1e1b63476b9f628 \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..2edc39c7029 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ 
+69e187ef1d2d9c9570363eb4186821e0341df5b8 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 0ae258b597a..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -038071889a5dbeb279e37fa46225e194139a427c \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..0b6a49a68e3 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +0fa00a45ff9bc6a4df44db81f2e4e44ea94bf88e \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index aee7c10cffd..00000000000 --- a/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b986d0ad8ee4dda8172a5a61875c47631e4b21d4 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..3ff27aff723 --- /dev/null +++ b/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +f6854c65c7f4c6d9de583f4daa4fd3ae8a3800f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index aa1011e007e..00000000000 --- a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f46574fbdfbcc81d936c77e15ba5b3af2c2b7253 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..9ffcb6d07cf --- /dev/null +++ b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e996e6c723eb415ba2cfa7f5e98bbf194a4918dd \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 561f17e773c..00000000000 --- a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f620262d667a294d390e8df7575cc2cca2626559 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..b126eebd88f --- /dev/null +++ b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +3b7a5d97b10885f16eb53deb15d64c942b9f9fdb \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 4735bdf1d2d..00000000000 --- a/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c44b07242fd706f6f7f14c9063a725e0e5b98cd \ No newline at end of file diff --git 
a/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..8313bac1acf --- /dev/null +++ b/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e4dda3eeb76e340aa4713a3b20d68c4a1504e505 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 9c19a6ad622..00000000000 --- a/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e33e0aa5fc227e90c8314f61b4cba1090035e33 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..1802f859ae0 --- /dev/null +++ b/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +800442a5d7612ce4c8748831871b4d436a50554e \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index c4a61bff68b..00000000000 --- a/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e416893f7b781239a15d3e2c7200ff26574d14de \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..1c543141bbf --- /dev/null +++ b/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +bdf184de9b5773c7af3ae908af78eeb1e512470c \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index edc56751403..00000000000 --- a/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b153b63b9333feedb18af2673eb6ccaf95bcc8bf \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..f3eb218b9e0 --- /dev/null +++ b/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +fc59de52bd2c7e420edfd235723cb8b0dd44e92d \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index eddd3d6cdcd..00000000000 --- a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0aa2758d70a79f2e0f33a87624fd9d31e155c864 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..4ce5c2024f0 --- /dev/null +++ b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +1d341e6a4f11f3170773ccffdbe6815b45967e3d \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 
deleted file mode 100644 index 571903cc72c..00000000000 --- a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -873c716ba629dae389b12ddb1aedf2f5c5f57fea \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..cf78d108a11 --- /dev/null +++ b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +a1b02c2b595ac92f45f0d2be03841a3a7fcae1f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 5e6a27b7cd1..00000000000 --- a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d7e47c2fb73c614cc5ca41529b2c273c73b0ce7 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..2634a93e82d --- /dev/null +++ b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e3ea422b56734329fb6974e9cf9f66478adb5793 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index cf841e18c5a..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4766305088797a66fe02d5aaa98e086867816e42 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..391d044c719 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +5eadbd4e63120b59ab6445e39489205f98420471 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 1fbb60a9d7a..00000000000 --- a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f0ee6fb780ea8aa9ec6d31e6a9cc7d48700bd2ca \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..f9f2bf5a43c --- /dev/null +++ b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +a336287e65d082535f02a8427666dbe46b1b9b74 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 50bb58f443d..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -787356d4ae6142bb8ca7e9713d0a281a797b57fb \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 
00000000000..a5332a9ca09 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +60e056d2dd04a81440482b047af0737bc41593d9 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 4942bbc6af3..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4e56ba76d6b23756b2bd4d9e42b2b00122cd4fa5 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..84b4b753063 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +1fce4e9b5c4482bb95e8b275c825d112640d6f1e \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 1ba2a93066d..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6ccac802dc1e4c177be043a173377cf5e517cff \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..429f8b59b3e --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +f104f306fef9d3033db026705043e9cbd145aba5 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 2b611862d41..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70ad9f6c3738727229867419d949527cc7789f62 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..a814cf5cb03 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +40b2034a6aed4c3fe0509016fab4f7bbb37a5fc8 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index e28887afd56..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -75504fd906929700e7d11f9600e4a79de48e1090 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 
00000000000..af3c4a277ea --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e117a87f4338be80b0a052d2ce454d5086aa57f1 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 739ecc4eb8f..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9eeeeabeab89ec305e831d80bdcc7e85a1140fbb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..899769b0e29 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +703dd91fccdc1c4662c80e412a449097c0578d83 \ No newline at end of file From f81b12e327a20d7a61cb4aa6b067872fb48897ee Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 12:39:40 -0500 Subject: [PATCH 196/322] minimize accessibility, remove unused threadpool --- plugins/repository-hdfs/build.gradle | 2 +- .../repositories/hdfs/HdfsBlobContainer.java | 8 +++---- .../repositories/hdfs/HdfsBlobStore.java | 22 +++++-------------- .../hdfs/HdfsPlugin.java | 5 +---- .../repositories/hdfs/HdfsRepository.java | 7 ++---- .../hdfs/HdfsRepositoryRestIT.java | 4 ++-- .../hdfs/HdfsTests.java | 3 ++- .../hdfs/TestingFs.java | 2 +- 8 files changed, 18 insertions(+), 35 deletions(-) rename plugins/repository-hdfs/src/main/java/org/elasticsearch/{plugin/hadoop => repositories}/hdfs/HdfsPlugin.java (96%) rename plugins/repository-hdfs/src/test/java/org/elasticsearch/{plugin/hadoop => repositories}/hdfs/HdfsRepositoryRestIT.java (94%) rename plugins/repository-hdfs/src/test/java/org/elasticsearch/{plugin/hadoop => repositories}/hdfs/HdfsTests.java (98%) rename plugins/repository-hdfs/src/test/java/org/elasticsearch/{plugin/hadoop => repositories}/hdfs/TestingFs.java (99%) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index f193fa2aba1..361166a2d0b 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -26,7 +26,7 @@ import java.nio.file.Paths esplugin { description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
- classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin' + classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } versions << [ diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 47024796b03..723cfc2e8f5 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -41,12 +41,12 @@ import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.Map; -public class HdfsBlobContainer extends AbstractBlobContainer { +final class HdfsBlobContainer extends AbstractBlobContainer { - protected final HdfsBlobStore blobStore; - protected final Path path; + private final HdfsBlobStore blobStore; + private final Path path; - public HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) { + HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) { super(blobPath); this.blobStore = blobStore; this.path = path; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 815c0d1eff3..119908c313a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -28,23 +28,19 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.concurrent.Executor; -public class HdfsBlobStore extends AbstractComponent implements BlobStore { +final class HdfsBlobStore extends AbstractComponent implements BlobStore { private final FileContextFactory fcf; private final Path rootHdfsPath; - private final ThreadPool threadPool; private final int bufferSizeInBytes; - public HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path, ThreadPool threadPool) throws IOException { + HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path) throws IOException { super(settings); this.fcf = fcf; this.rootHdfsPath = path; - this.threadPool = threadPool; this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); @@ -68,19 +64,11 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { return rootHdfsPath.toUri().toString(); } - public FileContextFactory fileContextFactory() { + FileContextFactory fileContextFactory() { return fcf; } - public Path path() { - return rootHdfsPath; - } - - public Executor executor() { - return threadPool.executor(ThreadPool.Names.SNAPSHOT); - } - - public int bufferSizeInBytes() { + int bufferSizeInBytes() { return bufferSizeInBytes; } @@ -111,7 +99,7 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { } private Path translateToHdfsPath(BlobPath blobPath) { - Path path = path(); + Path path = rootHdfsPath; for (String p : blobPath) { path = new Path(path, p); } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java similarity index 96% rename from plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java rename to plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java index 87f4f6024d7..f09b69aac48 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.plugin.hadoop.hdfs; +package org.elasticsearch.repositories.hdfs; import java.io.IOException; import java.nio.file.Files; @@ -24,14 +24,11 @@ import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; -import org.apache.lucene.util.Constants; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.hdfs.HdfsRepository; - // Code public class HdfsPlugin extends Plugin { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 7fbbcdac2ac..c4b608dc82a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -49,7 +49,6 @@ import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.threadpool.ThreadPool; public class HdfsRepository extends BlobStoreRepository implements FileContextFactory { @@ -59,18 +58,16 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa private final ByteSizeValue chunkSize; private final boolean compress; private final RepositorySettings repositorySettings; - private final ThreadPool threadPool; private final String path; private final String uri; private FileContext fc; private HdfsBlobStore blobStore; @Inject - public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException { + public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); this.repositorySettings = repositorySettings; - this.threadPool = threadPool; uri = repositorySettings.settings().get("uri", settings.get("uri")); path = repositorySettings.settings().get("path", settings.get("path")); @@ -112,7 +109,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa } }); logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); - blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool); + blobStore = new HdfsBlobStore(settings, this, hdfsPath); } 
catch (IOException e) { throw new RuntimeException(e); } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java similarity index 94% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java index 30d1aafcaba..db423cdd44f 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.plugin.hadoop.hdfs; +package org.elasticsearch.repositories.hdfs; import java.io.IOException; import java.util.Collection; @@ -24,8 +24,8 @@ import java.util.Collection; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.hdfs.HdfsPlugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java similarity index 98% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index d7c1a37c03d..b132296ce12 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.plugin.hadoop.hdfs; +package org.elasticsearch.repositories.hdfs; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.hdfs.HdfsPlugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java similarity index 99% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java index d39afe9cb74..c9c3c46c12c 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/TestingFs.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.hadoop.hdfs; +package org.elasticsearch.repositories.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DelegateToFileSystem; From 3ffd1a5219e3667fd460b054a6f2b721c7bd441f Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 12:54:33 -0500 Subject: [PATCH 197/322] final --- .../java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java | 2 +- .../org/elasticsearch/repositories/hdfs/HdfsRepository.java | 2 +- .../java/org/elasticsearch/repositories/hdfs/SecurityUtils.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java index f09b69aac48..32d787442a9 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -31,7 +31,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; // Code -public class HdfsPlugin extends Plugin { +public final class HdfsPlugin extends Plugin { // initialize some problematic classes with elevated privileges static { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index c4b608dc82a..24f3a248264 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -50,7 +50,7 @@ import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -public class HdfsRepository extends BlobStoreRepository implements FileContextFactory { +public final class HdfsRepository extends BlobStoreRepository implements FileContextFactory { public final static String TYPE = "hdfs"; diff --git 
a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index c86842baf32..59cbba8aff0 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -28,7 +28,7 @@ import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; -class SecurityUtils { +final class SecurityUtils { static V execute(FileContextFactory fcf, FcCallback callback) throws IOException { return execute(fcf.getFileContext(), callback); From deaf8884e90345a1110d2b5c5e6753c2051d0367 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 13:04:22 -0500 Subject: [PATCH 198/322] Fix exc handling --- .../repositories/hdfs/SecurityUtils.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index 59cbba8aff0..dda4d5e38ff 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.hdfs; import org.apache.hadoop.fs.FileContext; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.SpecialPermission; import java.io.IOException; @@ -49,17 +48,7 @@ final class SecurityUtils { } }); } catch (PrivilegedActionException pae) { - Throwable th = pae.getCause(); - if (th instanceof Error) { - throw (Error) th; - } - if (th instanceof RuntimeException) { - throw (RuntimeException) th; - } - if (th instanceof IOException) { - throw (IOException) th; - } - throw new ElasticsearchException(pae); + throw (IOException) pae.getException(); } } } From 0ed45c5bfb8d7eb06b4ecab49876de77e959c3d3 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 14:16:53 -0500 Subject: [PATCH 199/322] remove filesystem leniency --- .../common/blobstore/fs/FsBlobContainer.java | 12 +-- .../support/AbstractBlobContainer.java | 9 ++ .../repositories/hdfs/HdfsBlobContainer.java | 94 +++++-------------- .../repositories/hdfs/HdfsBlobStore.java | 23 +++-- 4 files changed, 50 insertions(+), 88 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 70397839bf9..c62166a23a3 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import java.io.BufferedInputStream; @@ -97,6 +96,7 @@ public class FsBlobContainer extends AbstractBlobContainer { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final Path 
file = path.resolve(blobName); + // TODO: why is this not specifying CREATE_NEW? Do we really need to be able to truncate existing files? try (OutputStream outputStream = Files.newOutputStream(file)) { Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]); } @@ -104,16 +104,6 @@ public class FsBlobContainer extends AbstractBlobContainer { IOUtils.fsync(path, true); } - @Override - public void writeBlob(String blobName, BytesReference data) throws IOException { - final Path file = path.resolve(blobName); - try (OutputStream outputStream = Files.newOutputStream(file)) { - data.writeTo(outputStream); - } - IOUtils.fsync(file, false); - IOUtils.fsync(path, true); - } - @Override public void move(String source, String target) throws IOException { Path sourcePath = path.resolve(source); diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java index 9166491f07c..8f83bbf8098 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java @@ -22,8 +22,10 @@ package org.elasticsearch.common.blobstore.support; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; +import java.io.InputStream; import java.util.Collection; import java.util.Map; @@ -57,4 +59,11 @@ public abstract class AbstractBlobContainer implements BlobContainer { deleteBlob(blob); } } + + @Override + public void writeBlob(String blobName, BytesReference bytes) throws IOException { + try (InputStream stream = bytes.streamInput()) { + writeBlob(blobName, stream, bytes.length()); + } + } } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 723cfc2e8f5..7269faaa8bc 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -25,17 +25,15 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Options.CreateOpts; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.fs.Syncable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; -import org.elasticsearch.common.bytes.BytesReference; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.util.Collections; import java.util.EnumSet; import java.util.LinkedHashMap; @@ -68,12 +66,16 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { - SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { - @Override - public Boolean doInHdfs(FileContext fc) throws IOException { - return fc.delete(new Path(path, blobName), true); - } - 
}); + try { + SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + @Override + public Boolean doInHdfs(FileContext fc) throws IOException { + return fc.delete(new Path(path, blobName), true); + } + }); + } catch (FileNotFoundException ok) { + // behaves like Files.deleteIfExists + } } @Override @@ -81,31 +83,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { - // _try_ to hsync the file before appending - // since append is optional this is a best effort - Path source = new Path(path, sourceBlobName); - - // try-with-resource is nice but since this is optional, it's hard to figure out - // what worked and what didn't. - // it's okay to not be able to append the file but not okay if hsync fails - // classic try / catch to the rescue - - FSDataOutputStream stream = null; - try { - stream = fc.create(source, EnumSet.of(CreateFlag.APPEND, CreateFlag.SYNC_BLOCK), CreateOpts.donotCreateParent()); - } catch (IOException ex) { - // append is optional, ignore - } - if (stream != null) { - try (OutputStream s = stream) { - if (s instanceof Syncable) { - ((Syncable) s).hsync(); - } - } - } - - // finally rename - fc.rename(source, new Path(path, targetBlobName)); + fc.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); return null; } }); @@ -127,49 +105,35 @@ final class HdfsBlobContainer extends AbstractBlobContainer { SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { - // don't use Streams to manually call hsync - // note that the inputstream is NOT closed here for two reasons: - // 1. it is closed already by ES after executing this method - // 0. closing the stream twice causes Hadoop to issue WARNING messages which are basically noise - // see https://issues.apache.org/jira/browse/HDFS-8099 - try (FSDataOutputStream stream = createOutput(fc, blobName)) { + Path blob = new Path(path, blobName); + // we pass CREATE, which means it fails if a blob already exists. + // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING + // that should be fixed there, no need to bring truncation into this, give the user an error. 
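+ // (Editorial clarification, not part of the original commit: CreateFlag.CREATE without
+ // CreateFlag.OVERWRITE makes FileContext.create throw FileAlreadyExistsException for an
+ // existing blob, which is the stricter no-truncation behavior described above.)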
+ EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); + CreateOpts[] opts = { CreateOpts.bufferSize(blobStore.bufferSizeInBytes()) }; + try (FSDataOutputStream stream = fc.create(blob, flags, opts)) { int bytesRead; byte[] buffer = new byte[blobStore.bufferSizeInBytes()]; while ((bytesRead = inputStream.read(buffer)) != -1) { stream.write(buffer, 0, bytesRead); + // For safety we also hsync each write as well, because of its docs: + // SYNC_BLOCK - to force closed blocks to the disk device + // "In addition Syncable.hsync() should be called after each write, + // if true synchronous behavior is required" + stream.hsync(); } - stream.hsync(); } return null; } }); } - @Override - public void writeBlob(String blobName, BytesReference bytes) throws IOException { - SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { - @Override - public Void doInHdfs(FileContext fc) throws IOException { - try (FSDataOutputStream stream = createOutput(fc, blobName)) { - bytes.writeTo(stream); - stream.hsync(); - } - return null; - } - }); - } - - private FSDataOutputStream createOutput(FileContext fc, String blobName) throws IOException { - return fc.create(new Path(path, blobName), EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK), - CreateOpts.bufferSize(blobStore.bufferSizeInBytes()), CreateOpts.createParent()); - } - @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { - return (!fc.util().exists(path) ? null : fc.util().listStatus(path, new PathFilter() { + return (fc.util().listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { return path.getName().startsWith(blobNamePrefix); @@ -177,9 +141,6 @@ final class HdfsBlobContainer extends AbstractBlobContainer { })); } }); - if (files == null || files.length == 0) { - return Collections.emptyMap(); - } Map map = new LinkedHashMap(); for (FileStatus file : files) { map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen())); @@ -192,12 +153,9 @@ final class HdfsBlobContainer extends AbstractBlobContainer { FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { - return (!fc.util().exists(path) ? 
null : fc.util().listStatus(path)); + return fc.util().listStatus(path); } }); - if (files == null || files.length == 0) { - return Collections.emptyMap(); - } Map map = new LinkedHashMap(); for (FileStatus file : files) { map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen())); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 119908c313a..1cf6ea0c317 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.repositories.hdfs; +import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; import org.elasticsearch.ElasticsearchException; @@ -34,26 +35,28 @@ import java.io.IOException; final class HdfsBlobStore extends AbstractComponent implements BlobStore { private final FileContextFactory fcf; - private final Path rootHdfsPath; + private final Path root; private final int bufferSizeInBytes; - HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path) throws IOException { + HdfsBlobStore(Settings settings, FileContextFactory fcf, Path root) throws IOException { super(settings); this.fcf = fcf; - this.rootHdfsPath = path; + this.root = root; this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); - mkdirs(path); + try { + mkdirs(root); + } catch (FileAlreadyExistsException ok) { + // behaves like Files.createDirectories + } } private void mkdirs(Path path) throws IOException { SecurityUtils.execute(fcf, new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { - if (fc.util().exists(path)) { - fc.mkdir(path, null, true); - } + fc.mkdir(path, null, true); return null; } }); @@ -61,7 +64,7 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { @Override public String toString() { - return rootHdfsPath.toUri().toString(); + return root.toUri().toString(); } FileContextFactory fileContextFactory() { @@ -92,6 +95,8 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { final Path path = translateToHdfsPath(blobPath); try { mkdirs(path); + } catch (FileAlreadyExistsException ok) { + // behaves like Files.createDirectories } catch (IOException ex) { throw new ElasticsearchException("failed to create blob container", ex); } @@ -99,7 +104,7 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { } private Path translateToHdfsPath(BlobPath blobPath) { - Path path = rootHdfsPath; + Path path = root; for (String p : blobPath) { path = new Path(path, p); } From c3f901400c0e4fccbbc2d422cbe85d356026bd53 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 11 Dec 2015 14:55:41 +0100 Subject: [PATCH 200/322] Move async translog sync logic into IndexService Today the logic to async-commit the translog is in every translog instance itself. While the setting is a per-index setting, we manage it per shard. This pollutes the translog code and can more easily be managed in IndexService.
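For illustration, here is an editor's sketch of the self-rescheduling pattern this commit uses (plain java.util.concurrent stand-ins, not code from this commit): the task re-arms itself after every run and simply stops once its owner is closed, so no per-translog scheduled task needs to be tracked and cancelled.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // The class name, the fixed 5s interval and the ScheduledExecutorService are assumptions,
    // standing in for ThreadPool and the index.translog.sync_interval setting added below.
    final class SelfReschedulingFSync implements Runnable {
        private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        private volatile boolean closed = false;

        void schedule() {
            scheduler.schedule(this, 5, TimeUnit.SECONDS);
        }

        @Override
        public void run() {
            if (closed == false) {
                syncAllShards(); // stand-in for IndexService#maybeFSyncTranslogs()
                if (closed == false) {
                    schedule();  // re-arm only while still needed; close() needs no cancellation
                }
            }
        }

        private void syncAllShards() {
            // fsync each shard translog that reports syncNeeded(), ignoring already-closed shards
        }

        void close() {
            closed = true;
            scheduler.shutdown();
        }
    }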
--- .../elasticsearch/cluster/ClusterModule.java | 3 +- .../org/elasticsearch/index/IndexService.java | 60 +++++++++++++++++++ .../elasticsearch/index/IndexSettings.java | 53 ++++++++++++++-- .../elasticsearch/index/shard/IndexShard.java | 22 +------ .../index/translog/Translog.java | 32 ---------- .../index/translog/TranslogConfig.java | 57 ++---------------- .../index/IndexSettingsTests.java | 19 ++++++ .../index/engine/InternalEngineTests.java | 8 +-- .../index/engine/ShadowEngineTests.java | 2 +- .../index/shard/IndexShardTests.java | 6 +- .../index/store/CorruptedTranslogIT.java | 13 ++-- .../index/translog/TranslogTests.java | 2 +- .../indices/stats/IndexStatsIT.java | 4 +- .../recovery/RecoveryWhileUnderLoadIT.java | 10 ++-- .../elasticsearch/test/ESIntegTestCase.java | 9 ++- 15 files changed, 162 insertions(+), 138 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 6a4831a2eee..42517854f08 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -65,6 +65,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.indexing.IndexingSlowLog; import org.elasticsearch.index.search.stats.SearchSlowLog; @@ -182,7 +183,7 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE); registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, Validator.EMPTY); registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE); - registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); + registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY); registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); registerIndexDynamicSetting(IndicesRequestCache.DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index a6b66742c55..ced7407d606 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -22,6 +22,7 @@ package org.elasticsearch.index; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cluster.metadata.AliasMetaData; @@ -39,6 +40,7 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.query.QueryCache; +import org.elasticsearch.index.engine.EngineClosedException; import org.elasticsearch.index.engine.EngineFactory; import 
org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -57,9 +59,11 @@ import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.AliasFilterParsingException; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; @@ -296,6 +300,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created"); eventListener.afterIndexShardCreated(indexShard); indexShard.updateRoutingEntry(routing, true); + if (shards.isEmpty() && this.indexSettings.getTranslogSyncInterval().millis() != 0) { + ThreadPool threadPool = nodeServicesProvider.getThreadPool(); + new AsyncTranslogFSync(this, threadPool).schedule(); // kick this off if we are the first shard in this service. + } shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); success = true; return indexShard; @@ -565,5 +573,57 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return indexStore; } // pkg private for testing + private void maybeFSyncTranslogs() { + if (indexSettings.getTranslogDurability() == Translog.Durabilty.ASYNC) { + for (IndexShard shard : this.shards.values()) { + try { + Translog translog = shard.getTranslog(); + if (translog.syncNeeded()) { + translog.sync(); + } + } catch (EngineClosedException | AlreadyClosedException ex) { + // fine - continue; + } catch (IOException e) { + logger.warn("failed to sync translog", e); + } + } + } + } + + + /** + * FSyncs the translog for all shards of this index in a defined interval. 
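+ * The task re-schedules itself on the SAME thread pool and hands the actual fsync work to the FLUSH pool; it stops once the index service is closed or no longer holds any shards.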
+ */ + final static class AsyncTranslogFSync implements Runnable { + private final IndexService indexService; + private final ThreadPool threadPool; + + AsyncTranslogFSync(IndexService indexService, ThreadPool threadPool) { + this.indexService = indexService; + this.threadPool = threadPool; + } + + boolean mustRun() { + // don't re-schedule if it's closed or if we don't have a single shard here..., we are done + return (indexService.closed.get() || indexService.shards.isEmpty()) == false; + } + + void schedule() { + threadPool.schedule(indexService.getIndexSettings().getTranslogSyncInterval(), ThreadPool.Names.SAME, AsyncTranslogFSync.this); + } + + @Override + public void run() { + if (mustRun()) { + threadPool.executor(ThreadPool.Names.FLUSH).execute(() -> { + indexService.maybeFSyncTranslogs(); + if (mustRun()) { + schedule(); + } + }); + } + } + } + }
diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index f0e06ea0bc7..9eb716dff5b 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -25,12 +25,11 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.translog.Translog; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; +import java.util.*; import java.util.function.Consumer; import java.util.function.Predicate;
@@ -48,6 +47,9 @@ public final class IndexSettings { public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard"; public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard"; public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields"; + public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; + public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; + private final String uuid; private final List> updateListeners; private final Index index;
@@ -67,6 +69,8 @@ public final class IndexSettings { private final boolean queryStringAllowLeadingWildcard; private final boolean defaultAllowUnmappedFields; private final Predicate indexNameMatcher; + private volatile Translog.Durabilty durabilty; + private final TimeValue syncInterval; /** * Returns the default search field for this index.
@@ -127,7 +131,7 @@ public final class IndexSettings { public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners, final Predicate indexNameMatcher) { this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); - this.updateListeners = Collections.unmodifiableList(new ArrayList<>(updateListeners)); + this.updateListeners = Collections.unmodifiableList( new ArrayList<>(updateListeners)); this.index = new Index(indexMetaData.getIndex()); version = Version.indexCreated(settings); uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); @@ -144,6 +148,10 @@ public final class IndexSettings { this.parseFieldMatcher = new ParseFieldMatcher(settings); this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true); this.indexNameMatcher = indexNameMatcher; + final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST.name()); + this.durabilty = getFromSettings(settings, Translog.Durabilty.REQUEST); + syncInterval = settings.getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); + assert indexNameMatcher.test(indexMetaData.getIndex()); } @@ -295,6 +303,11 @@ public final class IndexSettings { logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); } } + try { + updateSettings(mergedSettings); + } catch (Exception e) { + logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); + } return true; } @@ -304,4 +317,34 @@ public final class IndexSettings { List> getUpdateListeners() { // for testing return updateListeners; } + + /** + * Returns the translog durability for this index. + */ + public Translog.Durabilty getTranslogDurability() { + return durabilty; + } + + public Translog.Durabilty getFromSettings(Settings settings, Translog.Durabilty defaultValue) { + final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); + try { + return Translog.Durabilty.valueOf(value.toUpperCase(Locale.ROOT)); + } catch (IllegalArgumentException ex) { + logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durabilty.values())); + return defaultValue; + } + } + + private void updateSettings(Settings settings) { + final Translog.Durabilty durabilty = getFromSettings(settings, this.durabilty); + if (durabilty != this.durabilty) { + logger.info("updating durability from [{}] to [{}]", this.durabilty, durabilty); + this.durabilty = durabilty; + } + } + + public TimeValue getTranslogSyncInterval() { + return syncInterval; + } + } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index fd8490ae8e0..b895588321a 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -254,8 +254,8 @@ public class IndexShard extends AbstractIndexShardComponent { logger.debug("state: [CREATED]"); this.checkIndexOnStartup = settings.get("index.shard.check_on_startup", "false"); - this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, getFromSettings(logger, settings, Translog.Durabilty.REQUEST), - provider.getBigArrays(), threadPool); + this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, + 
provider.getBigArrays()); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -1159,12 +1159,6 @@ public class IndexShard extends AbstractIndexShardComponent { this.flushOnClose = flushOnClose; } - final Translog.Durabilty durabilty = getFromSettings(logger, settings, translogConfig.getDurabilty()); - if (durabilty != translogConfig.getDurabilty()) { - logger.info("updating durability from [{}] to [{}]", translogConfig.getDurabilty(), durabilty); - translogConfig.setDurabilty(durabilty); - } - TimeValue refreshInterval = settings.getAsTime(INDEX_REFRESH_INTERVAL, this.refreshInterval); if (!refreshInterval.equals(this.refreshInterval)) { logger.info("updating refresh_interval from [{}] to [{}]", this.refreshInterval, refreshInterval); @@ -1541,17 +1535,7 @@ public class IndexShard extends AbstractIndexShardComponent { * Returns the current translog durability mode */ public Translog.Durabilty getTranslogDurability() { - return translogConfig.getDurabilty(); - } - - private static Translog.Durabilty getFromSettings(ESLogger logger, Settings settings, Translog.Durabilty defaultValue) { - final String value = settings.get(TranslogConfig.INDEX_TRANSLOG_DURABILITY, defaultValue.name()); - try { - return Translog.Durabilty.valueOf(value.toUpperCase(Locale.ROOT)); - } catch (IllegalArgumentException ex) { - logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", TranslogConfig.INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durabilty.values())); - return defaultValue; - } + return indexSettings.getTranslogDurability(); } private final AtomicBoolean asyncFlushRunning = new AtomicBoolean(); diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 7da54ed8a37..f5895a1f5b1 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -47,7 +47,6 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShardComponent; -import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.EOFException; @@ -160,9 +159,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC writeLock = new ReleasableLock(rwl.writeLock()); this.location = config.getTranslogPath(); Files.createDirectories(this.location); - if (config.getSyncInterval().millis() > 0 && config.getThreadPool() != null) { - syncScheduler = config.getThreadPool().schedule(config.getSyncInterval(), ThreadPool.Names.SAME, new Sync()); - } try { if (translogGeneration != null) { @@ -715,34 +711,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } - class Sync implements Runnable { - @Override - public void run() { - // don't re-schedule if its closed..., we are done - if (closed.get()) { - return; - } - final ThreadPool threadPool = config.getThreadPool(); - if (syncNeeded()) { - threadPool.executor(ThreadPool.Names.FLUSH).execute(new Runnable() { - @Override - public void run() { - try { - sync(); - } catch (Exception e) { - logger.warn("failed to sync translog", e); - } - if (closed.get() == false) { - syncScheduler = threadPool.schedule(config.getSyncInterval(), 
ThreadPool.Names.SAME, Sync.this); - } - } - }); - } else { - syncScheduler = threadPool.schedule(config.getSyncInterval(), ThreadPool.Names.SAME, Sync.this); - } - } - } - public static class Location implements Accountable, Comparable { public final long generation; diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java index 442792f3132..682c3101027 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java @@ -38,16 +38,9 @@ import java.nio.file.Path; */ public final class TranslogConfig { - public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; - public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; public static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(8, ByteSizeUnit.KB); - - private final TimeValue syncInterval; private final BigArrays bigArrays; - private final ThreadPool threadPool; - private final boolean syncOnEachOperation; private volatile TranslogGeneration translogGeneration; - private volatile Translog.Durabilty durabilty = Translog.Durabilty.REQUEST; private final IndexSettings indexSettings; private final ShardId shardId; private final Path translogPath; @@ -58,67 +51,25 @@ public final class TranslogConfig { * @param shardId the shard ID this translog belongs to * @param translogPath the path to use for the transaction log files * @param indexSettings the index settings used to set internal variables - * @param durabilty the default durability setting for the translog * @param bigArrays a bigArrays instance used for temporarily allocating write operations - * @param threadPool a {@link ThreadPool} to schedule async sync durability */ - public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool) { - this(shardId, translogPath, indexSettings, durabilty, bigArrays, threadPool, DEFAULT_BUFFER_SIZE); + public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, BigArrays bigArrays) { + this(shardId, translogPath, indexSettings, bigArrays, DEFAULT_BUFFER_SIZE); } - TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool, ByteSizeValue bufferSize) { + TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, BigArrays bigArrays, ByteSizeValue bufferSize) { this.bufferSize = bufferSize; this.indexSettings = indexSettings; this.shardId = shardId; this.translogPath = translogPath; - this.durabilty = durabilty; - this.threadPool = threadPool; this.bigArrays = bigArrays; - - syncInterval = indexSettings.getSettings().getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); - if (syncInterval.millis() > 0 && threadPool != null) { - syncOnEachOperation = false; - } else if (syncInterval.millis() == 0) { - syncOnEachOperation = true; - } else { - syncOnEachOperation = false; - } - } - - - /** - * Returns a {@link ThreadPool} to schedule async durability operations - */ - public ThreadPool getThreadPool() { - return threadPool; - } - - /** - * Returns the current durability mode of this translog. 
- */ - public Translog.Durabilty getDurabilty() { - return durabilty; - } - - /** - * Sets the current durability mode for the translog. - */ - public void setDurabilty(Translog.Durabilty durabilty) { - this.durabilty = durabilty; } /** * Returns true iff each low level operation shoudl be fsynced */ public boolean isSyncOnEachOperation() { - return syncOnEachOperation; - } - - /** - * Returns the current async fsync interval - */ - public TimeValue getSyncInterval() { - return syncInterval; + return indexSettings.getTranslogSyncInterval().millis() == 0; } /** diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 3f97fe402fa..9c2dec73c02 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -158,4 +159,22 @@ public class IndexSettingsTests extends ESTestCase { } + public void testUpdateDurability() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "async") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + assertEquals(Translog.Durabilty.ASYNC, settings.getTranslogDurability()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "request").build())); + assertEquals(Translog.Durabilty.REQUEST, settings.getTranslogDurability()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + assertEquals(Translog.Durabilty.REQUEST, settings.getTranslogDurability()); // test default + } + + } diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 9a33a052198..c8960883b19 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -253,7 +253,7 @@ public class InternalEngineTests extends ESTestCase { } protected Translog createTranslog(Path translogPath) throws IOException { - TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool); + TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE); return new Translog(translogConfig); } @@ -271,7 +271,7 @@ public class InternalEngineTests extends ESTestCase { public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) { IndexWriterConfig iwc = newIndexWriterConfig(); - TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, 
threadPool); + TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE); EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, INDEX_SETTINGS), indexSettings , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig, @@ -1975,14 +1975,14 @@ public class InternalEngineTests extends ESTestCase { Translog.TranslogGeneration generation = engine.getTranslog().getGeneration(); engine.close(); - Translog translog = new Translog(new TranslogConfig(shardId, createTempDir(), INDEX_SETTINGS, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool)); + Translog translog = new Translog(new TranslogConfig(shardId, createTempDir(), INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE)); translog.add(new Translog.Index("test", "SomeBogusId", "{}".getBytes(Charset.forName("UTF-8")))); assertEquals(generation.translogFileGeneration, translog.currentFileGeneration()); translog.close(); EngineConfig config = engine.config(); /* create a TranslogConfig that has been created with a different UUID */ - TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool); + TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE); EngineConfig brokenConfig = new EngineConfig(shardId, threadPool, config.getIndexingService(), config.getIndexSettings() , null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getMergeSchedulerConfig(), diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 8e2501e9430..339dac2e58a 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -224,7 +224,7 @@ public class ShadowEngineTests extends ESTestCase { public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) { IndexWriterConfig iwc = newIndexWriterConfig(); - TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool); + TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE); EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, indexSettings), indexSettings , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig, iwc.getAnalyzer(), iwc.getSimilarity() , new CodecService(null, logger), new Engine.EventListener() { diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index e1c7cad5607..c0cf16ba6e3 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -69,6 +69,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import 
org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; @@ -87,7 +88,6 @@ import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.test.DummyShardLock; @@ -418,7 +418,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } private void setDurability(IndexShard shard, Translog.Durabilty durabilty) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, durabilty.name()).build()).get(); + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durabilty.name()).build()).get(); assertEquals(durabilty, shard.getTranslogDurability()); } @@ -691,7 +691,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testMaybeFlush() throws Exception { - createIndex("test", settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST).build()); + createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index f34c83da64b..d712d846c47 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -27,18 +27,17 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.engine.MockEngineSupport; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import java.io.IOException; @@ -53,6 +52,7 @@ import java.util.Collection; import java.util.List; import java.util.Set; import java.util.TreeSet; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -71,7 +71,6 @@ public class CorruptedTranslogIT extends ESIntegTestCase { return pluginList(MockTransportService.TestPlugin.class); } - 
@TestLogging("index.translog:TRACE,index.gateway:TRACE") public void testCorruptTranslogFiles() throws Exception { internalCluster().startNodesAsync(1, Settings.EMPTY).get(); @@ -81,7 +80,6 @@ public class CorruptedTranslogIT extends ESIntegTestCase { .put("index.refresh_interval", "-1") .put(MockEngineSupport.FLUSH_ON_CLOSE_RATIO, 0.0d) // never flush - always recover from translog .put(IndexShard.INDEX_FLUSH_ON_CLOSE, false) // never flush - always recover from translog - .put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, "1s") // fsync the translog every second )); ensureYellow(); @@ -99,14 +97,13 @@ public class CorruptedTranslogIT extends ESIntegTestCase { // Restart the single node internalCluster().fullRestart(); - // node needs time to start recovery and discover the translog corruption - Thread.sleep(1000); - enableTranslogFlush("test"); + client().admin().cluster().prepareHealth().setWaitForYellowStatus().setTimeout(new TimeValue(1000, TimeUnit.MILLISECONDS)).setWaitForEvents(Priority.LANGUID).get(); try { client().prepareSearch("test").setQuery(matchAllQuery()).get(); fail("all shards should be failed due to a corrupted translog"); } catch (SearchPhaseExecutionException e) { + e.printStackTrace(); // Good, all shards should be failed because there is only a // single shard and its translog is corrupt } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index db9f212b568..c1d2b2e1997 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -139,7 +139,7 @@ public class TranslogTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .build(); ByteSizeValue bufferSize = randomBoolean() ? 
TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null, bufferSize); + return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } protected void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index e40e1c03de1..86aae630857 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -46,6 +47,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.search.sort.SortOrder; @@ -316,7 +318,7 @@ public class IndexStatsIT extends ESIntegTestCase { .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") .put("index.merge.policy.type", "tiered") - .put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, "ASYNC") + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC.name()) )); ensureGreen(); long termUpto = 0; diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index bdac3dad8e6..0da313a1bd5 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -29,9 +29,9 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.BackgroundIndexer; @@ -55,7 +55,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, 
Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -108,7 +108,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -159,7 +159,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -230,7 +230,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index b28842b1976..bc3ff2f1819 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -95,6 +95,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.MockEngineFactoryPlugin; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -104,8 +105,6 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import 
org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; -import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.store.IndicesStore; @@ -518,14 +517,14 @@ public abstract class ESIntegTestCase extends ESTestCase { builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { - builder.put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durabilty.values())); + builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durabilty.values())); } if (random.nextBoolean()) { if (rarely(random)) { - builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op } else { - builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); } } From 3a2464b80ec089a85b2678764fc620a053a00cef Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 15:37:34 -0500 Subject: [PATCH 201/322] improve build logic on windows without native libraries --- plugins/repository-hdfs/build.gradle | 7 ++----- .../rest-api-spec/test/hdfs_repository/10_basic.yaml | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 361166a2d0b..bf27ecc22c1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -93,11 +93,8 @@ integTest { dependsOn hdfsFixture } else { logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") - systemProperty 'tests.rest.blacklist', [ - 'hdfs_repository/20_repository/*', - 'hdfs_repository/30_snapshot/*', - 'hdfs_repository/40_restore/*' - ].join(',') + // just tests that the plugin loads + systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic' } } diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml index b7bc644a832..e8f118c4e02 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml @@ -2,7 +2,7 @@ # # Check plugin is installed # -"HDFS Repository loaded": +"Plugin loaded": - do: cluster.state: {} From 87494dd7dbb58409a4f69a178d2b06e59bf3aa11 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 22:06:51 +0100 Subject: [PATCH 202/322] fix spelling and remove wildcard imports --- .../TransportReplicationAction.java | 2 +- .../org/elasticsearch/index/IndexService.java | 2 +- .../elasticsearch/index/IndexSettings.java | 31 +++++++++++-------- .../elasticsearch/index/shard/IndexShard.java | 5 +-- .../index/translog/Translog.java | 2 +- .../index/IndexSettingsTests.java | 6 ++-- .../index/shard/IndexShardTests.java | 22 ++++++------- .../indices/stats/IndexStatsIT.java | 3 +- .../recovery/RecoveryWhileUnderLoadIT.java | 8 ++--- 
.../elasticsearch/test/ESIntegTestCase.java | 2 +- 10 files changed, 42 insertions(+), 41 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 34f7422196e..d96ec177366 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1018,7 +1018,7 @@ public abstract class TransportReplicationAction indexNameMatcher; - private volatile Translog.Durabilty durabilty; + private volatile Translog.Durability durability; private final TimeValue syncInterval; /** @@ -148,8 +153,8 @@ public final class IndexSettings { this.parseFieldMatcher = new ParseFieldMatcher(settings); this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true); this.indexNameMatcher = indexNameMatcher; - final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST.name()); - this.durabilty = getFromSettings(settings, Translog.Durabilty.REQUEST); + final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST.name()); + this.durability = getFromSettings(settings, Translog.Durability.REQUEST); syncInterval = settings.getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); assert indexNameMatcher.test(indexMetaData.getIndex()); @@ -321,25 +326,25 @@ public final class IndexSettings { /** * Returns the translog durability for this index. */ - public Translog.Durabilty getTranslogDurability() { - return durabilty; + public Translog.Durability getTranslogDurability() { + return durability; } - public Translog.Durabilty getFromSettings(Settings settings, Translog.Durabilty defaultValue) { + public Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); try { - return Translog.Durabilty.valueOf(value.toUpperCase(Locale.ROOT)); + return Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException ex) { - logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durabilty.values())); + logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durability.values())); return defaultValue; } } private void updateSettings(Settings settings) { - final Translog.Durabilty durabilty = getFromSettings(settings, this.durabilty); - if (durabilty != this.durabilty) { - logger.info("updating durability from [{}] to [{}]", this.durabilty, durabilty); - this.durabilty = durabilty; + final Translog.Durability durability = getFromSettings(settings, this.durability); + if (durability != this.durability) { + logger.info("updating durability from [{}] to [{}]", this.durability, durability); + this.durability = durability; } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index b895588321a..4545787231a 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -109,7 +109,6 @@ import 
org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogStats; -import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.index.warmer.ShardIndexWarmerService; import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.indices.IndicesWarmer; @@ -126,10 +125,8 @@ import java.io.IOException; import java.io.PrintStream; import java.nio.channels.ClosedByInterruptException; import java.nio.charset.StandardCharsets; -import java.util.Arrays; import java.util.EnumSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; @@ -1534,7 +1531,7 @@ public class IndexShard extends AbstractIndexShardComponent { /** * Returns the current translog durability mode */ - public Translog.Durabilty getTranslogDurability() { + public Translog.Durability getTranslogDurability() { return indexSettings.getTranslogDurability(); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index f5895a1f5b1..c2cb3c19af8 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1156,7 +1156,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } - public enum Durabilty { + public enum Durability { /** * Async durability - translogs are synced based on a time interval. */ diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 9c2dec73c02..316badf376b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -165,15 +165,15 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "async") .build()); IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); - assertEquals(Translog.Durabilty.ASYNC, settings.getTranslogDurability()); + assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability()); settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "request").build())); - assertEquals(Translog.Durabilty.REQUEST, settings.getTranslogDurability()); + assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); metaData = newIndexMeta("index", Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); - assertEquals(Translog.Durabilty.REQUEST, settings.getTranslogDurability()); // test default + assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); // test default } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index c0cf16ba6e3..d749ba56594 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -391,35 +391,35 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = 
getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); client().prepareIndex("test", "bar", "2").setSource("{}").get(); assertTrue(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); client().prepareDelete("test", "bar", "1").get(); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); client().prepareDelete("test", "bar", "2").get(); assertTrue(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "3").setSource("{}")) .add(client().prepareDelete("test", "bar", "1")).get()); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "4").setSource("{}")) .add(client().prepareDelete("test", "bar", "3")).get()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertTrue(shard.getEngine().getTranslog().syncNeeded()); } - private void setDurability(IndexShard shard, Translog.Durabilty durabilty) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durabilty.name()).build()).get(); - assertEquals(durabilty, shard.getTranslogDurability()); + private void setDurability(IndexShard shard, Translog.Durability durability) { + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durability.name()).build()).get(); + assertEquals(durability, shard.getTranslogDurability()); } public void testMinimumCompatVersion() { @@ -691,7 +691,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testMaybeFlush() throws Exception { - createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST).build()); + createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 86aae630857..b40ceb00b16 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; 
import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -318,7 +317,7 @@ public class IndexStatsIT extends ESIntegTestCase { .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") .put("index.merge.policy.type", "tiered") - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC.name()) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC.name()) )); ensureGreen(); long termUpto = 0; diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 0da313a1bd5..438445d538a 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -55,7 +55,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -108,7 +108,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -159,7 +159,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -230,7 +230,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, 
numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index bc3ff2f1819..118120ecd73 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -517,7 +517,7 @@ public abstract class ESIntegTestCase extends ESTestCase { builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durabilty.values())); + builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durability.values())); } if (random.nextBoolean()) { From 98f9749ee17a2b66585858d505b0006566bc784d Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Dec 2015 22:12:32 +0100 Subject: [PATCH 203/322] fix visibility --- core/src/main/java/org/elasticsearch/index/IndexSettings.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 4bcc4e7ed84..772fb053cda 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -330,7 +330,7 @@ public final class IndexSettings { return durability; } - public Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { + private Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); try { return Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)); From b8524bdb112058f5d5072ca2e7c38d1bff03f2d7 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 16:16:24 -0500 Subject: [PATCH 204/322] add tests --- .../test/hdfs_repository/10_basic.yaml | 13 +++++ ...ository.yaml => 20_repository_create.yaml} | 2 +- .../hdfs_repository/20_repository_delete.yaml | 50 +++++++++++++++++++ 3 files changed, 64 insertions(+), 1 deletion(-) rename plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/{20_repository.yaml => 20_repository_create.yaml} (94%) create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml index e8f118c4e02..75c64d82d2d 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml @@ -14,3 +14,16 @@ - match: { nodes.$master.plugins.0.name: repository-hdfs } - match: { nodes.$master.plugins.0.jvm: true } +--- +# Check that we can't use file:// repositories 
or anything like that +# We only test this plugin against hdfs:// +"HDFS only": + - do: + catch: /Invalid scheme/ + snapshot.create_repository: + repository: misconfigured_repository + body: + type: hdfs + settings: + uri: "file://bogus" + path: "foo/bar" diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml similarity index 94% rename from plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml rename to plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml index e4e004c396e..06e33c53257 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml @@ -1,6 +1,6 @@ # Integration tests for HDFS Repository plugin # -# Check plugin is installed +# Tests creating a repository # "HDFS Repository Config": - do: diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml new file mode 100644 index 00000000000..34c770a8074 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml @@ -0,0 +1,50 @@ +# Integration tests for HDFS Repository plugin +# +# Tests creating a repository, then deleting it and creating it again. +# +"HDFS Delete Repository": + # Create repository + - do: + snapshot.create_repository: + repository: test_repo_hdfs_1 + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "foo/bar" + + # Get repository + - do: + snapshot.get_repository: + repository: test_repo_hdfs_1 + + - is_true: test_repo_hdfs_1 + - match: {test_repo_hdfs_1.settings.path : "foo/bar"} + + # Delete repository + - do: + snapshot.delete_repository: + repository: test_repo_hdfs_1 + + # Get repository: It should be gone + - do: + catch: /repository_missing_exception/ + snapshot.get_repository: + repository: test_repo_hdfs_1 + + # Create it again + - do: + snapshot.create_repository: + repository: test_repo_hdfs_1 + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "foo/bar" + + # Get repository again + - do: + snapshot.get_repository: + repository: test_repo_hdfs_1 + + - is_true: test_repo_hdfs_1 From 5ebcf183e5f4296628423b07b1cdff19e9631ab3 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 17:02:50 -0500 Subject: [PATCH 205/322] tests --- .../test/hdfs_repository/10_basic.yaml | 2 + .../hdfs_repository/20_repository_create.yaml | 18 ++++-- .../hdfs_repository/20_repository_verify.yaml | 23 +++++++ .../test/hdfs_repository/30_snapshot.yaml | 27 ++++++--- .../test/hdfs_repository/30_snapshot_get.yaml | 60 +++++++++++++++++++ .../test/hdfs_repository/40_restore.yaml | 30 +++++++--- 6 files changed, 138 insertions(+), 22 deletions(-) create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml create mode 100644 plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml index 75c64d82d2d..7c569408a61 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml @@ -15,8 +15,10 @@ - match: { nodes.$master.plugins.0.name: repository-hdfs } - match: { nodes.$master.plugins.0.jvm: true } --- +# # Check that we can't use file:// repositories or anything like that # We only test this plugin against hdfs:// +# "HDFS only": - do: catch: /Invalid scheme/ diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml index 06e33c53257..0f942dfdc03 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml @@ -2,20 +2,26 @@ # # Tests creating a repository # -"HDFS Repository Config": +"HDFS Repository Creation": + # Create repository - do: snapshot.create_repository: - repository: test_repo_hdfs_1 + repository: test_repository_create body: type: hdfs settings: uri: "hdfs://localhost:9999" - path: "foo/bar" + path: "test/repository_create" # Get repository - do: snapshot.get_repository: - repository: test_repo_hdfs_1 + repository: test_repository_create - - is_true: test_repo_hdfs_1 - - match: {test_repo_hdfs_1.settings.path : "foo/bar"} + - is_true: test_repository_create + - match: {test_repository_create.settings.path : "test/repository_create"} + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_repository_create diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml new file mode 100644 index 00000000000..d1695b00d9d --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml @@ -0,0 +1,23 @@ +# Integration tests for HDFS Repository plugin +# +# Tests explicit verify +# +"HDFS Repository Verify": + - do: + snapshot.create_repository: + repository: test_repository_verify + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/repository_verify" + + # Verify repository + - do: + snapshot.verify_repository: + repository: test_repository_verify + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_repository_verify diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index a7af512666c..7db9a429230 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -3,17 +3,18 @@ # Actually perform a snapshot to hdfs # --- -setup: - +"take snapshot": + # Create repository - do: snapshot.create_repository: - repository: test_repo_hdfs_snapshot + repository: test_snapshot_repository body: type: hdfs settings: uri: "hdfs://localhost:9999" - path: "foo/bar" + path: "test/snapshot" + # Create index - do: 
indices.create: index: test_index @@ -22,16 +23,15 @@ setup: number_of_shards: 1 number_of_replicas: 1 + # Wait for yellow - do: cluster.health: wait_for_status: yellow ---- -"Create a snapshot": - + # Create snapshot - do: snapshot.create: - repository: test_repo_hdfs_snapshot + repository: test_snapshot_repository snapshot: test_snapshot wait_for_completion: true @@ -40,3 +40,14 @@ setup: - match: { snapshot.shards.successful: 1 } - match: { snapshot.shards.failed : 0 } + # Remove our snapshot + - do: + snapshot.delete: + repository: test_snapshot_repository + snapshot: test_snapshot + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_snapshot_repository + diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml new file mode 100644 index 00000000000..5819cc32db1 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml @@ -0,0 +1,60 @@ +# Integration tests for HDFS Repository plugin +# +# Tests retrieving information about snapshot +# +--- +"Get a snapshot": + # Create repository + - do: + snapshot.create_repository: + repository: test_snapshot_get_repository + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/snapshot_get" + + # Create index + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + # Wait for green + - do: + cluster.health: + wait_for_status: green + + # Create snapshot + - do: + snapshot.create: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + wait_for_completion: true + + - match: { snapshot.snapshot: test_snapshot_get } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + + # Get snapshot info + - do: + snapshot.get: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + + - match: { snapshots.0.snapshot : test_snapshot_get } + + # Remove our snapshot + - do: + snapshot.delete: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_snapshot_get_repository diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index 2327b5da906..47559226275 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -3,17 +3,19 @@ # Actually perform a snapshot to hdfs, then restore it # --- -setup: +"Create a snapshot and then restore it": + # Create repository - do: snapshot.create_repository: - repository: test_repo_hdfs_restore + repository: test_restore_repository body: type: hdfs settings: uri: "hdfs://localhost:9999" - path: "foo/bar" + path: "test/restore" + # Create index - do: indices.create: index: test_index @@ -22,16 +24,15 @@ setup: number_of_shards: 1 number_of_replicas: 0 + # Wait for green - do: cluster.health: wait_for_status: green ---- -"Create a snapshot and then restore it": - + # Take snapshot - do: snapshot.create: - repository: test_repo_hdfs_restore + repository: test_restore_repository snapshot: test_restore 
wait_for_completion: true @@ -42,16 +43,19 @@ setup: - is_true: snapshot.version - gt: { snapshot.version_id: 0} + # Close index - do: indices.close: index : test_index + # Restore index - do: snapshot.restore: - repository: test_repo_hdfs_restore + repository: test_restore_repository snapshot: test_restore wait_for_completion: true + # Check recovery stats - do: indices.recovery: index: test_index @@ -63,3 +67,13 @@ setup: - match: { test_index.shards.0.index.files.reused: 0} - match: { test_index.shards.0.index.size.reused_in_bytes: 0} + # Remove our snapshot + - do: + snapshot.delete: + repository: test_restore_repository + snapshot: test_restore + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_restore_repository From 7065639a267197eede22983cd0dc001968ee3bc0 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 17:25:15 -0500 Subject: [PATCH 206/322] add test for listing --- .../test/hdfs_repository/30_snapshot_get.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml index 5819cc32db1..f38f4783b19 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml @@ -46,6 +46,16 @@ repository: test_snapshot_get_repository snapshot: test_snapshot_get + - length: { snapshots: 1 } + - match: { snapshots.0.snapshot : test_snapshot_get } + + # List snapshot info + - do: + snapshot.get: + repository: test_snapshot_get_repository + snapshot: "*" + + - length: { snapshots: 1 } - match: { snapshots.0.snapshot : test_snapshot_get } # Remove our snapshot From 2cbfc54a81824804500b6645c48c2970898340af Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 15:23:28 -0800 Subject: [PATCH 207/322] avoid too-long classpath so it works on windows --- plugins/repository-hdfs/build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index bf27ecc22c1..eb3004af2ad 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -64,8 +64,8 @@ dependencyLicenses { task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) { dependsOn project.configurations.hdfsFixture executable = new File(project.javaHome, 'bin/java') - args '-cp', "${ -> project.configurations.hdfsFixture.asPath }", - 'hdfs.MiniHDFS', + env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }" + args 'hdfs.MiniHDFS', baseDir } From 26eaa16a89a082fbaf685803b47b2eca4093b4dc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 21 Dec 2015 15:55:13 -0800 Subject: [PATCH 208/322] Remove "additional config" from hdfs repositories --- .../repositories/hdfs/HdfsRepository.java | 51 +------------------ .../repositories/hdfs/HdfsTests.java | 5 +- .../src/test/resources/additional-cfg.xml | 12 ----- .../src/test/resources/conf-2.xml | 12 ----- 4 files changed, 3 insertions(+), 77 deletions(-) delete mode 100644 plugins/repository-hdfs/src/test/resources/additional-cfg.xml delete mode 100644 plugins/repository-hdfs/src/test/resources/conf-2.xml diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 24f3a248264..657df50c09b 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -52,8 +52,6 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; public final class HdfsRepository extends BlobStoreRepository implements FileContextFactory { - public final static String TYPE = "hdfs"; - private final BlobPath basePath; private final ByteSizeValue chunkSize; private final boolean compress; @@ -77,7 +75,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null)); this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false)); } - + @Override protected void doStart() { if (!Strings.hasText(uri)) { @@ -183,13 +181,6 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon cfg.setClassLoader(this.getClass().getClassLoader()); cfg.reloadConfiguration(); - String confLocation = repositorySettings.settings().get("conf_location", settings.get("conf_location")); - if (Strings.hasText(confLocation)) { - for (String entry : Strings.commaDelimitedListToStringArray(confLocation)) { - addConfigLocation(cfg, entry.trim()); - } - } - Map map = repositorySettings.settings().getByPrefix("conf.").getAsMap(); for (Entry entry : map.entrySet()) { cfg.set(entry.getKey(), entry.getValue()); @@ -214,46 +205,6 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon } } - @SuppressForbidden(reason = "Where is this reading configuration files from? 
It should use Environment for ES conf dir") - private void addConfigLocation(Configuration cfg, String confLocation) { - URL cfgURL = null; - // it's an URL - if (!confLocation.contains(":")) { - cfgURL = cfg.getClassLoader().getResource(confLocation); - - // fall back to file - if (cfgURL == null) { - java.nio.file.Path path = PathUtils.get(confLocation); - if (!Files.isReadable(path)) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, - "Cannot find classpath resource or file 'conf_location' [%s] defined for hdfs snapshot/restore", - confLocation)); - } - String pathLocation = path.toUri().toString(); - logger.debug("Adding path [{}] as file [{}]", confLocation, pathLocation); - confLocation = pathLocation; - } - else { - logger.debug("Resolving path [{}] to classpath [{}]", confLocation, cfgURL); - } - } - else { - logger.debug("Adding path [{}] as URL", confLocation); - } - - if (cfgURL == null) { - try { - cfgURL = new URL(confLocation); - } catch (MalformedURLException ex) { - throw new IllegalArgumentException(String.format(Locale.ROOT, - "Invalid 'conf_location' URL [%s] defined for hdfs snapshot/restore", confLocation), ex); - } - } - - cfg.addResource(cfgURL); - } - @Override protected BlobStore blobStore() { return blobStore; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index b132296ce12..9e833ad6c72 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -54,7 +54,6 @@ public class HdfsTests extends ESIntegTestCase { .put("uri", "hdfs:///") .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) .put("path", "foo") - .put("conf", "additional-cfg.xml, conf-2.xml") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean()) ).get(); @@ -143,7 +142,7 @@ public class HdfsTests extends ESIntegTestCase { // expected } } - + public void testNonHdfsUri() { Client client = client(); try { @@ -191,4 +190,4 @@ public class HdfsTests extends ESIntegTestCase { private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} \ No newline at end of file +} diff --git a/plugins/repository-hdfs/src/test/resources/additional-cfg.xml b/plugins/repository-hdfs/src/test/resources/additional-cfg.xml deleted file mode 100644 index b1b6611e924..00000000000 --- a/plugins/repository-hdfs/src/test/resources/additional-cfg.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - foo - foo - - - paradise - lost - - diff --git a/plugins/repository-hdfs/src/test/resources/conf-2.xml b/plugins/repository-hdfs/src/test/resources/conf-2.xml deleted file mode 100644 index b1b6611e924..00000000000 --- a/plugins/repository-hdfs/src/test/resources/conf-2.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - foo - foo - - - paradise - lost - - From 3c07a427dc23708c35ae088f2f8fd13de1854060 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 19:06:37 -0500 Subject: [PATCH 209/322] fix exc handling --- .../repositories/hdfs/HdfsRepository.java | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 
657df50c09b..5893fe700a7 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -131,17 +131,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon } }); } catch (PrivilegedActionException pae) { - Throwable th = pae.getCause(); - if (th instanceof Error) { - throw (Error) th; - } - if (th instanceof RuntimeException) { - throw (RuntimeException) th; - } - if (th instanceof IOException) { - throw (IOException) th; - } - throw new ElasticsearchException(pae); + throw (IOException) pae.getException(); } } From d0e930641394b1d6d2434e94a12bbaade708c867 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 21 Dec 2015 16:15:28 -0800 Subject: [PATCH 210/322] Remove reading node settings as defaults for hdfs repository settings --- .../repositories/hdfs/HdfsRepository.java | 35 ++++++++----------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 5893fe700a7..e0051425179 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -18,18 +18,6 @@ */ package org.elasticsearch.repositories.hdfs; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.nio.file.Files; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.Locale; -import java.util.Map; -import java.util.Map.Entry; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.FileContext; @@ -39,17 +27,24 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import java.io.IOException; +import java.net.URI; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; + public final class HdfsRepository extends BlobStoreRepository implements FileContextFactory { private final BlobPath basePath; @@ -67,13 +62,13 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon this.repositorySettings = repositorySettings; - uri = repositorySettings.settings().get("uri", settings.get("uri")); - path = repositorySettings.settings().get("path", settings.get("path")); + uri = repositorySettings.settings().get("uri"); + path = 
repositorySettings.settings().get("path"); this.basePath = BlobPath.cleanPath(); - this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null)); - this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false)); + this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null); + this.compress = repositorySettings.settings().getAsBoolean("compress", false); } @Override @@ -107,7 +102,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon } }); logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); - blobStore = new HdfsBlobStore(settings, this, hdfsPath); + blobStore = new HdfsBlobStore(repositorySettings.settings(), this, hdfsPath); } catch (IOException e) { throw new RuntimeException(e); } @@ -167,7 +162,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon private FileContext initFileContext(RepositorySettings repositorySettings) throws IOException { - Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", settings.getAsBoolean("load_defaults", true))); + Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true)); cfg.setClassLoader(this.getClass().getClassLoader()); cfg.reloadConfiguration(); From 267cd65506ab22ceb7606243ef58979bde5dbfe5 Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Wed, 16 Dec 2015 00:01:31 +0900 Subject: [PATCH 211/322] Analysis : Fix no response from Analyze API without specified index Fix error handling in TransportSingleShardAction without shardIt Closes #15148 --- .../single/shard/TransportSingleShardAction.java | 2 +- .../indices/analyze/AnalyzeActionIT.java | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index c14878a3fca..2a7e19bfade 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -177,7 +177,7 @@ public abstract class TransportSingleShardAction Date: Mon, 21 Dec 2015 20:07:43 -0500 Subject: [PATCH 212/322] remove shitton of permissions --- .../repositories/hdfs/HdfsRepository.java | 74 ++++++++++--------- .../plugin-metadata/plugin-security.policy | 27 +------ 2 files changed, 45 insertions(+), 56 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index e0051425179..63aba8f9a78 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -18,15 +18,29 @@ */ package org.elasticsearch.repositories.hdfs; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.net.URI; +import java.security.AccessController; +import java.security.Principal; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.Collections; +import 
java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; + +import javax.security.auth.Subject; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; @@ -36,15 +50,6 @@ import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.io.IOException; -import java.net.URI; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.Locale; -import java.util.Map; -import java.util.Map.Entry; - public final class HdfsRepository extends BlobStoreRepository implements FileContextFactory { private final BlobPath basePath; @@ -104,7 +109,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); blobStore = new HdfsBlobStore(repositorySettings.settings(), this, hdfsPath); } catch (IOException e) { - throw new RuntimeException(e); + throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", actualUri), e); } super.doStart(); } @@ -146,20 +151,12 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon } } if (fc == null) { - Thread th = Thread.currentThread(); - ClassLoader oldCL = th.getContextClassLoader(); - try { - th.setContextClassLoader(getClass().getClassLoader()); - return initFileContext(repositorySettings); - } catch (IOException ex) { - throw ex; - } finally { - th.setContextClassLoader(oldCL); - } + return initFileContext(repositorySettings); } return fc; } + @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)") private FileContext initFileContext(RepositorySettings repositorySettings) throws IOException { Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true)); @@ -170,23 +167,34 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon for (Entry entry : map.entrySet()) { cfg.set(entry.getKey(), entry.getValue()); } - + + // create a hadoop user. if we want other auth, it must be done different anyway, and tested. 
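For readers following the hunk at this point: the commit replaces the UserGroupInformation bootstrap with a hand-built JAAS Subject whose only principal is Hadoop's internal org.apache.hadoop.security.User, created reflectively. The following is a minimal standalone sketch of that pattern, not part of the commit; it assumes hadoop-common is on the classpath (the User class is Hadoop-internal, which is why reflection is needed) and Java 8 era security APIs.

    import java.lang.reflect.Constructor;
    import java.security.AccessController;
    import java.security.Principal;
    import java.security.PrivilegedExceptionAction;
    import java.util.Collections;
    import javax.security.auth.Subject;

    public class FakeHadoopSubjectSketch {
        public static void main(String[] args) throws Exception {
            // Hadoop's User principal is internal API, so it is created reflectively,
            // mirroring the hunk below.
            Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
            Constructor<?> ctor = clazz.getConstructor(String.class);
            ctor.setAccessible(true);
            Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));

            // A Subject carrying only that principal: no JAAS login module, no credentials.
            Subject subject = new Subject(false, Collections.singleton(principal),
                    Collections.emptySet(), Collections.emptySet());

            // Inside doAs, the Subject is attached to the access-control context,
            // which is where Hadoop-style code looks up the "current" user.
            String name = Subject.doAs(subject, (PrivilegedExceptionAction<String>) () -> {
                Subject current = Subject.getSubject(AccessController.getContext());
                return current.getPrincipals().iterator().next().getName();
            });
            System.out.println("running as: " + name);
        }
    }

Running Hadoop calls under Subject.doAs with this fake subject is what lets a current user be resolved without any JAAS login module or native JAAS library, which is also why the policy file changes in this commit drop the loadLibrary.jaas_* permissions.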
+ Subject subject; try { - UserGroupInformation.setConfiguration(cfg); - } catch (Throwable th) { - throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot initialize Hadoop"), th); + Class clazz = Class.forName("org.apache.hadoop.security.User"); + Constructor ctor = clazz.getConstructor(String.class); + ctor.setAccessible(true); + Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name")); + subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(), Collections.emptySet()); + } catch (ReflectiveOperationException e) { + throw new RuntimeException(e); } URI actualUri = URI.create(uri); - try { - // disable FS cache - cfg.setBoolean("fs.hdfs.impl.disable.cache", true); + // disable FS cache + cfg.setBoolean("fs.hdfs.impl.disable.cache", true); - // create the AFS manually since through FileContext is relies on Subject.doAs for no reason at all - AbstractFileSystem fs = AbstractFileSystem.get(actualUri, cfg); - return FileContext.getFileContext(fs, cfg); - } catch (Exception ex) { - throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create Hdfs file-system for uri [%s]", actualUri), ex); + // create the AFS manually since through FileContext is relies on Subject.doAs for no reason at all + try { + return Subject.doAs(subject, new PrivilegedExceptionAction() { + @Override + public FileContext run() throws IOException { + AbstractFileSystem fs = AbstractFileSystem.get(actualUri, cfg); + return FileContext.getFileContext(fs, cfg); + } + }); + } catch (PrivilegedActionException e) { + throw (IOException) e.getException(); } } diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index 2aaf5717275..83e5599a9c8 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -18,16 +18,9 @@ */ grant { - // used by the plugin to get the TCCL to properly initialize all of Hadoop components + // Hadoop UserGroupInformation clinit permission java.lang.RuntimePermission "getClassLoader"; - // set TCCL used for bootstrapping Hadoop Configuration and JAAS - permission java.lang.RuntimePermission "setContextClassLoader"; - - // - // Hadoop 2 - // - // UserGroupInformation (UGI) Metrics clinit permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; @@ -38,20 +31,8 @@ grant { // org.apache.hadoop.util.ShutdownHookManager clinit permission java.lang.RuntimePermission "shutdownHooks"; - // UGI triggers JAAS + // JAAS is used always, we use a fake subject, hurts nobody permission javax.security.auth.AuthPermission "getSubject"; - - // JAAS libraries are not loaded with the proper context in Hadoop, hence why the permission is needed here - permission java.lang.RuntimePermission "loadLibrary.jaas_nt"; - permission java.lang.RuntimePermission "loadLibrary.jaas_unix"; - - // plus LoginContext - permission javax.security.auth.AuthPermission "modifyPrincipals"; - - permission javax.security.auth.AuthPermission "modifyPublicCredentials"; - - permission javax.security.auth.AuthPermission "modifyPrivateCredentials"; - permission javax.security.auth.AuthPermission "doAs"; - - }; + permission javax.security.auth.AuthPermission "modifyPrivateCredentials"; +}; From 795869c345de12fe81517fc8491bb6385737f29b Mon Sep 17 00:00:00 2001 From: 
Robert Muir Date: Mon, 21 Dec 2015 20:55:17 -0500 Subject: [PATCH 213/322] remove filecontextfactory --- .../repositories/hdfs/FileContextFactory.java | 28 ------------------- .../repositories/hdfs/HdfsBlobContainer.java | 20 ++++++------- .../repositories/hdfs/HdfsBlobStore.java | 16 +++++------ .../repositories/hdfs/HdfsRepository.java | 5 ++-- .../repositories/hdfs/SecurityUtils.java | 4 +-- 5 files changed, 22 insertions(+), 51 deletions(-) delete mode 100644 plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java deleted file mode 100644 index 0080b7fe239..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileContextFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.repositories.hdfs; - -import org.apache.hadoop.fs.FileContext; - -import java.io.IOException; - -interface FileContextFactory { - - FileContext getFileContext() throws IOException; -} diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 7269faaa8bc..78c9989e465 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -53,7 +53,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try { - return SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + return SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public Boolean doInHdfs(FileContext fc) throws IOException { return fc.util().exists(new Path(path, blobName)); @@ -67,7 +67,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { try { - SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public Boolean doInHdfs(FileContext fc) throws IOException { return fc.delete(new Path(path, blobName), true); @@ -80,7 +80,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + 
SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { fc.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); @@ -92,17 +92,17 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + return SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public InputStream doInHdfs(FileContext fc) throws IOException { - return fc.open(new Path(path, blobName), blobStore.bufferSizeInBytes()); + return fc.open(new Path(path, blobName), blobStore.getBufferSizeInBytes()); } }); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { Path blob = new Path(path, blobName); @@ -110,10 +110,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING // that should be fixed there, no need to bring truncation into this, give the user an error. EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); - CreateOpts[] opts = { CreateOpts.bufferSize(blobStore.bufferSizeInBytes()) }; + CreateOpts[] opts = { CreateOpts.bufferSize(blobStore.getBufferSizeInBytes()) }; try (FSDataOutputStream stream = fc.create(blob, flags, opts)) { int bytesRead; - byte[] buffer = new byte[blobStore.bufferSizeInBytes()]; + byte[] buffer = new byte[blobStore.getBufferSizeInBytes()]; while ((bytesRead = inputStream.read(buffer)) != -1) { stream.write(buffer, 0, bytesRead); // For safety we also hsync each write as well, because of its docs: @@ -130,7 +130,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + FileStatus[] files = SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { return (fc.util().listStatus(path, new PathFilter() { @@ -150,7 +150,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileContextFactory(), new FcCallback() { + FileStatus[] files = SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { return fc.util().listStatus(path); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 1cf6ea0c317..0ca1caaeced 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -34,13 +34,13 @@ import java.io.IOException; final class HdfsBlobStore extends AbstractComponent implements BlobStore { - private final FileContextFactory 
fcf; + private final HdfsRepository repository; private final Path root; private final int bufferSizeInBytes; - HdfsBlobStore(Settings settings, FileContextFactory fcf, Path root) throws IOException { + HdfsBlobStore(Settings settings, HdfsRepository repository, Path root) throws IOException { super(settings); - this.fcf = fcf; + this.repository = repository; this.root = root; this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); @@ -53,7 +53,7 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { } private void mkdirs(Path path) throws IOException { - SecurityUtils.execute(fcf, new FcCallback() { + SecurityUtils.execute(repository, new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { fc.mkdir(path, null, true); @@ -67,11 +67,11 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { return root.toUri().toString(); } - FileContextFactory fileContextFactory() { - return fcf; + HdfsRepository getRepository() { + return repository; } - int bufferSizeInBytes() { + int getBufferSizeInBytes() { return bufferSizeInBytes; } @@ -82,7 +82,7 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { @Override public void delete(BlobPath path) throws IOException { - SecurityUtils.execute(fcf, new FcCallback() { + SecurityUtils.execute(repository, new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { fc.delete(translateToHdfsPath(path), true); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 63aba8f9a78..97d7cbfce4b 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -50,7 +50,7 @@ import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -public final class HdfsRepository extends BlobStoreRepository implements FileContextFactory { +public final class HdfsRepository extends BlobStoreRepository { private final BlobPath basePath; private final ByteSizeValue chunkSize; @@ -115,8 +115,7 @@ public final class HdfsRepository extends BlobStoreRepository implements FileCon } // as the FileSystem is long-lived and might go away, make sure to check it before it's being used. 
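The guard that follows the comment above is a recurring Elasticsearch pattern: check SpecialPermission against the installed SecurityManager before escalating via doPrivileged, so that unprivileged code such as scripts is stopped before any privileged filesystem work. Below is a condensed sketch under those assumptions; the helper name PrivilegedIo is illustrative only, and the real method additionally rebuilds the long-lived FileContext on demand.

    import java.io.IOException;
    import java.security.AccessController;
    import java.security.PrivilegedActionException;
    import java.security.PrivilegedExceptionAction;
    import org.elasticsearch.SpecialPermission;

    final class PrivilegedIo {
        // Guard first, escalate second: unprivileged callers fail the permission
        // check before AccessController.doPrivileged ever runs.
        static <T> T run(PrivilegedExceptionAction<T> action) throws IOException {
            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                sm.checkPermission(new SpecialPermission());
            }
            try {
                return AccessController.doPrivileged(action);
            } catch (PrivilegedActionException e) {
                // mirroring the commit, this assumes the action only throws IOException
                throw (IOException) e.getException();
            }
        }
    }

A call site would wrap each FileContext operation, e.g. PrivilegedIo.run(() -> fc.util().exists(path)), which is roughly the role SecurityUtils.execute plays for the blob store in these hunks.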
- @Override - public FileContext getFileContext() throws IOException { + FileContext getFileContext() throws IOException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { // unprivileged code such as scripts do not have SpecialPermission diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java index dda4d5e38ff..c68c5e1c43b 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java @@ -29,8 +29,8 @@ import java.security.PrivilegedExceptionAction; final class SecurityUtils { - static V execute(FileContextFactory fcf, FcCallback callback) throws IOException { - return execute(fcf.getFileContext(), callback); + static V execute(HdfsRepository repository, FcCallback callback) throws IOException { + return execute(repository.getFileContext(), callback); } static V execute(FileContext fc, FcCallback callback) throws IOException { From c54d53c8d54450b6747b25cd3b289f1073ccc93a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 21:11:06 -0500 Subject: [PATCH 214/322] streamline these classes a bit --- .../repositories/hdfs/HdfsBlobContainer.java | 26 +++++++++---------- .../repositories/hdfs/HdfsBlobStore.java | 22 +++------------- .../repositories/hdfs/HdfsRepository.java | 7 ++--- 3 files changed, 20 insertions(+), 35 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 78c9989e465..04d103bb4ac 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -41,19 +41,19 @@ import java.util.Map; final class HdfsBlobContainer extends AbstractBlobContainer { - private final HdfsBlobStore blobStore; + private final HdfsRepository repository; private final Path path; - HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) { + HdfsBlobContainer(BlobPath blobPath, HdfsRepository repository, Path path) { super(blobPath); - this.blobStore = blobStore; + this.repository = repository; this.path = path; } @Override public boolean blobExists(String blobName) { try { - return SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + return SecurityUtils.execute(repository, new FcCallback() { @Override public Boolean doInHdfs(FileContext fc) throws IOException { return fc.util().exists(new Path(path, blobName)); @@ -67,7 +67,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { try { - SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + SecurityUtils.execute(repository, new FcCallback() { @Override public Boolean doInHdfs(FileContext fc) throws IOException { return fc.delete(new Path(path, blobName), true); @@ -80,7 +80,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + SecurityUtils.execute(repository, new FcCallback() { @Override public Void 
doInHdfs(FileContext fc) throws IOException { fc.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); @@ -92,17 +92,17 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + return SecurityUtils.execute(repository, new FcCallback() { @Override public InputStream doInHdfs(FileContext fc) throws IOException { - return fc.open(new Path(path, blobName), blobStore.getBufferSizeInBytes()); + return fc.open(new Path(path, blobName), repository.bufferSizeInBytes); } }); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + SecurityUtils.execute(repository, new FcCallback() { @Override public Void doInHdfs(FileContext fc) throws IOException { Path blob = new Path(path, blobName); @@ -110,10 +110,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING // that should be fixed there, no need to bring truncation into this, give the user an error. EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); - CreateOpts[] opts = { CreateOpts.bufferSize(blobStore.getBufferSizeInBytes()) }; + CreateOpts[] opts = { CreateOpts.bufferSize(repository.bufferSizeInBytes) }; try (FSDataOutputStream stream = fc.create(blob, flags, opts)) { int bytesRead; - byte[] buffer = new byte[blobStore.getBufferSizeInBytes()]; + byte[] buffer = new byte[repository.bufferSizeInBytes]; while ((bytesRead = inputStream.read(buffer)) != -1) { stream.write(buffer, 0, bytesRead); // For safety we also hsync each write as well, because of its docs: @@ -130,7 +130,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + FileStatus[] files = SecurityUtils.execute(repository, new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { return (fc.util().listStatus(path, new PathFilter() { @@ -150,7 +150,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.getRepository(), new FcCallback() { + FileStatus[] files = SecurityUtils.execute(repository, new FcCallback() { @Override public FileStatus[] doInHdfs(FileContext fc) throws IOException { return fc.util().listStatus(path); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 0ca1caaeced..4937cc13faf 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -25,26 +25,18 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.component.AbstractComponent; -import 
org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; import java.io.IOException; -final class HdfsBlobStore extends AbstractComponent implements BlobStore { +final class HdfsBlobStore implements BlobStore { private final HdfsRepository repository; private final Path root; - private final int bufferSizeInBytes; - HdfsBlobStore(Settings settings, HdfsRepository repository, Path root) throws IOException { - super(settings); + HdfsBlobStore(HdfsRepository repository, Path root) throws IOException { this.repository = repository; this.root = root; - this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); - try { mkdirs(root); } catch (FileAlreadyExistsException ok) { @@ -67,17 +59,9 @@ final class HdfsBlobStore extends AbstractComponent implements BlobStore { return root.toUri().toString(); } - HdfsRepository getRepository() { - return repository; - } - - int getBufferSizeInBytes() { - return bufferSizeInBytes; - } - @Override public BlobContainer blobContainer(BlobPath path) { - return new HdfsBlobContainer(path, this, buildHdfsPath(path)); + return new HdfsBlobContainer(path, repository, buildHdfsPath(path)); } @Override diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 97d7cbfce4b..5b5e6ab5e53 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoryName; @@ -55,6 +56,7 @@ public final class HdfsRepository extends BlobStoreRepository { private final BlobPath basePath; private final ByteSizeValue chunkSize; private final boolean compress; + final int bufferSizeInBytes; private final RepositorySettings repositorySettings; private final String path; private final String uri; @@ -66,14 +68,13 @@ public final class HdfsRepository extends BlobStoreRepository { super(name.getName(), repositorySettings, indexShardRepository); this.repositorySettings = repositorySettings; - uri = repositorySettings.settings().get("uri"); path = repositorySettings.settings().get("path"); - this.basePath = BlobPath.cleanPath(); this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null); this.compress = repositorySettings.settings().getAsBoolean("compress", false); + this.bufferSizeInBytes = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); } @Override @@ -107,7 +108,7 @@ public final class HdfsRepository extends BlobStoreRepository { } }); logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); - blobStore = new HdfsBlobStore(repositorySettings.settings(), this, hdfsPath); + blobStore = new HdfsBlobStore(this, hdfsPath); } catch (IOException e) { throw new 
ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", actualUri), e); } From af7d6b629c6aad02637c93c42ac03453181da1d9 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 21 Dec 2015 18:32:04 -0800 Subject: [PATCH 215/322] Change hdfs unit tests to be a single node test instead of integ test --- .../repositories/hdfs/HdfsTests.java | 35 +++++--- .../test/ESSingleNodeTestCase.java | 82 +++++++++++++------ 2 files changed, 81 insertions(+), 36 deletions(-) diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index 9e833ad6c72..d234ec2f132 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -22,6 +22,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import java.util.Collection; +import java.util.Collections; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -36,12 +37,12 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.ESSingleNodeTestCase; -@ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0) -public class HdfsTests extends ESIntegTestCase { +public class HdfsTests extends ESSingleNodeTestCase { @Override - protected Collection> nodePlugins() { + protected Collection> getPlugins() { return pluginList(HdfsPlugin.class); } @@ -59,16 +60,18 @@ public class HdfsTests extends ESIntegTestCase { ).get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + createIndex("test-idx-1"); + createIndex("test-idx-2"); + createIndex("test-idx-3"); ensureGreen(); logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i); - index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i); - index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i); + client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); } - refresh(); + client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(100L)); assertThat(count(client, "test-idx-2"), equalTo(100L)); assertThat(count(client, "test-idx-3"), equalTo(100L)); @@ -90,7 +93,7 @@ public class HdfsTests extends ESIntegTestCase { for (int i = 0; i < 100; i += 2) { client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get(); } - refresh(); + client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(50L)); assertThat(count(client, "test-idx-2"), equalTo(50L)); assertThat(count(client, "test-idx-3"), equalTo(50L)); @@ -109,7 +112,7 @@ public class HdfsTests extends ESIntegTestCase { // Test restore after index deletion logger.info("--> delete indices"); - 
cluster().wipeIndices("test-idx-1", "test-idx-2"); + client().admin().indices().prepareDelete("test-idx-1", "test-idx-2").get(); logger.info("--> restore one index after deletion"); restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); @@ -135,7 +138,9 @@ public class HdfsTests extends ESIntegTestCase { .get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + createIndex("test-idx-1"); + createIndex("test-idx-2"); + createIndex("test-idx-3"); ensureGreen(); fail("Path name is invalid"); } catch (RepositoryException re) { @@ -157,7 +162,9 @@ public class HdfsTests extends ESIntegTestCase { .get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + createIndex("test-idx-1"); + createIndex("test-idx-2"); + createIndex("test-idx-3"); ensureGreen(); fail("Path name is invalid"); } catch (RepositoryException re) { @@ -179,7 +186,9 @@ public class HdfsTests extends ESIntegTestCase { .get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); + createIndex("test-idx-1"); + createIndex("test-idx-2"); + createIndex("test-idx-3"); ensureGreen(); fail("Path name is invalid"); } catch (RepositoryException re) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 287bd121c90..9b06bae21b0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -37,15 +38,22 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -59,13 +67,13 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private static Node NODE = null; - private static void reset() { + private void reset() { assert NODE != null; stopNode(); startNode(); } - private static void startNode() { + private void startNode() { assert NODE == null; NODE = newNode(); // we must wait for the node to actually be up and 
running. otherwise the node might have started, elected itself master but might not yet have removed the @@ -80,7 +88,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { Releasables.close(node); } - static void cleanup(boolean resetNode) { + private void cleanup(boolean resetNode) { assertAcked(client().admin().indices().prepareDelete("*").get()); if (resetNode) { reset(); } @@ -92,7 +100,19 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { metaData.transientSettings().getAsMap().size(), equalTo(0)); } + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + // Create the node lazily, on the first test. This is ok because we do not randomize any settings, + // only the cluster name. This allows us to have overridden properties for plugins and the version to use. + if (NODE == null) { + startNode(); + } + } + @After + @Override public void tearDown() throws Exception { logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName()); super.tearDown(); @@ -102,7 +122,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { @BeforeClass public static void setUpClass() throws Exception { stopNode(); - startNode(); } @AfterClass @@ -119,25 +138,42 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return false; } - private static Node newNode() { - Node build = new Node(Settings.builder() - .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) - .put("path.home", createTempDir()) - // TODO: use a consistent data path for custom paths - // This needs to tie into the ESIntegTestCase#indexSettings() method - .put("path.shared_data", createTempDir().getParent()) - .put("node.name", nodeName()) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put("script.inline", "on") - .put("script.indexed", "on") - .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created - .put("http.enabled", false) - .put("node.local", true) - .put("node.data", true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) - .build() - ); + /** The version of elasticsearch the node should act like. */ + protected Version getVersion() { + return Version.CURRENT; + } + + /** The plugin classes that should be added to the node. */ + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.emptyList(); + } + + /** Helper method to create list of plugins without specifying generic types. */ + @SafeVarargs + @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning + protected final Collection<Class<? extends Plugin>> pluginList(Class<? extends Plugin>...
plugins) { + return Arrays.asList(plugins); + } + + private Node newNode() { + Settings settings = Settings.builder() + .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put("path.home", createTempDir()) + // TODO: use a consistent data path for custom paths + // This needs to tie into the ESIntegTestCase#indexSettings() method + .put("path.shared_data", createTempDir().getParent()) + .put("node.name", nodeName()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put("script.inline", "on") + .put("script.indexed", "on") + .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created + .put("http.enabled", false) + .put("node.local", true) + .put("node.data", true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) + .build(); + Node build = new MockNode(settings, getVersion(), getPlugins()); build.start(); assertThat(DiscoveryNode.localNode(build.settings()), is(true)); return build; From 51f6519e71c312e0a0227127ed5e7f9a1b0e1653 Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Tue, 22 Dec 2015 11:53:29 +0900 Subject: [PATCH 216/322] Analysis : Fix no response from Analyze API without specified index fix test --- .../java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 80d5a6080b6..335a9d38fba 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -512,7 +512,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { fail("shouldn't get here"); } catch (Throwable t) { assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), startsWith("failed to find analyzer")); + assertThat(t.getMessage(), startsWith("failed to find global analyzer")); } From d104d6d6520b55d053c60d1f45559992f049a7a3 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 21 Dec 2015 19:38:54 -0800 Subject: [PATCH 217/322] Refactor hdfs unit tests to be simple and check every configuration error condition --- .../repositories/hdfs/HdfsRepository.java | 8 +- .../repositories/hdfs/HdfsTests.java | 105 ++++++++---------- 2 files changed, 52 insertions(+), 61 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 5b5e6ab5e53..9fd7a097b57 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -79,18 +79,18 @@ public final class HdfsRepository extends BlobStoreRepository { @Override protected void doStart() { - if (!Strings.hasText(uri)) { + if (Strings.hasText(uri) == false) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } URI actualUri = URI.create(uri); String scheme = actualUri.getScheme(); - if (!Strings.hasText(scheme) || !scheme.toLowerCase(Locale.ROOT).equals("hdfs")) { + if (Strings.hasText(scheme) == false || scheme.toLowerCase(Locale.ROOT).equals("hdfs") == false) { throw new IllegalArgumentException( String.format(Locale.ROOT, 
"Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", scheme, uri)); } String p = actualUri.getPath(); - if (Strings.hasText(p) && !p.equals("/")) { + if (Strings.hasText(p) && p.equals("/") == false) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", p, uri)); } @@ -167,7 +167,7 @@ public final class HdfsRepository extends BlobStoreRepository { for (Entry entry : map.entrySet()) { cfg.set(entry.getKey(), entry.getValue()); } - + // create a hadoop user. if we want other auth, it must be done different anyway, and tested. Subject subject; try { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index d234ec2f132..a79a753fb57 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -123,76 +123,67 @@ public class HdfsTests extends ESSingleNodeTestCase { assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); } - public void testWrongPath() { - Client client = client(); - + public void testMissingUri() { try { - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") - .setType("hdfs") - .setSettings(Settings.settingsBuilder() - .put("uri", "hdfs:///") - .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) - .put("path", "a@b$c#11:22") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean())) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.EMPTY).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("No 'uri' defined for hdfs")); + } + } - createIndex("test-idx-1"); - createIndex("test-idx-2"); - createIndex("test-idx-3"); - ensureGreen(); - fail("Path name is invalid"); - } catch (RepositoryException re) { - // expected + public void testEmptyUri() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "/path").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage(), e.getCause().getMessage().contains("Invalid scheme [null] specified in uri [/path]")); } } public void testNonHdfsUri() { - Client client = client(); try { - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") - .setType("hdfs") - .setSettings(Settings.settingsBuilder() - .put("uri", "file:///") - .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) - .put("path", "should-fail") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean())) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - - createIndex("test-idx-1"); - createIndex("test-idx-2"); - createIndex("test-idx-3"); - ensureGreen(); - fail("Path name is invalid"); - } catch (RepositoryException re) { - // expected + 
client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "file:///").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("Invalid scheme [file] specified in uri [file:///]")); } } public void testPathSpecifiedInHdfs() { - Client client = client(); try { - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") - .setType("hdfs") - .setSettings(Settings.settingsBuilder() - .put("uri", "hdfs:///some/path") - .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) - .put("path", "should-fail") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean())) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "hdfs:///some/path").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("Use 'path' option to specify a path [/some/path]")); + } + } - createIndex("test-idx-1"); - createIndex("test-idx-2"); - createIndex("test-idx-3"); - ensureGreen(); - fail("Path name is invalid"); - } catch (RepositoryException re) { - // expected + public void testMissingPath() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "hdfs:///").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("No 'path' defined for hdfs")); } } From a587ba110c44ddcf9d61dab20ab58587c92c6c87 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 23:44:36 -0500 Subject: [PATCH 218/322] add some safety around repository --- .../repositories/hdfs/FcCallback.java | 29 --- .../repositories/hdfs/HdfsBlobContainer.java | 42 ++--- .../repositories/hdfs/HdfsBlobStore.java | 8 +- .../repositories/hdfs/HdfsPlugin.java | 1 + .../repositories/hdfs/HdfsRepository.java | 169 +++++++++--------- .../repositories/hdfs/SecurityUtils.java | 54 ------ 6 files changed, 106 insertions(+), 197 deletions(-) delete mode 100644 plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java delete mode 100644 plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java deleted file mode 100644 index c430d4f6aed..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FcCallback.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.hdfs; - -import org.apache.hadoop.fs.FileContext; - -import java.io.IOException; - -interface FcCallback { - - V doInHdfs(FileContext fc) throws IOException; -} diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 04d103bb4ac..7e9740aac7a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -53,10 +53,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try { - return SecurityUtils.execute(repository, new FcCallback() { + return repository.execute(new HdfsRepository.Operation() { @Override - public Boolean doInHdfs(FileContext fc) throws IOException { - return fc.util().exists(new Path(path, blobName)); + public Boolean run(FileContext fileContext) throws IOException { + return fileContext.util().exists(new Path(path, blobName)); } }); } catch (Exception e) { @@ -67,10 +67,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { try { - SecurityUtils.execute(repository, new FcCallback() { + repository.execute(new HdfsRepository.Operation() { @Override - public Boolean doInHdfs(FileContext fc) throws IOException { - return fc.delete(new Path(path, blobName), true); + public Boolean run(FileContext fileContext) throws IOException { + return fileContext.delete(new Path(path, blobName), true); } }); } catch (FileNotFoundException ok) { @@ -80,10 +80,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - SecurityUtils.execute(repository, new FcCallback() { + repository.execute(new HdfsRepository.Operation() { @Override - public Void doInHdfs(FileContext fc) throws IOException { - fc.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); + public Void run(FileContext fileContext) throws IOException { + fileContext.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); return null; } }); @@ -92,26 +92,26 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return SecurityUtils.execute(repository, new FcCallback() { + return repository.execute(new HdfsRepository.Operation() { @Override - public InputStream doInHdfs(FileContext fc) throws IOException { - return fc.open(new Path(path, blobName), repository.bufferSizeInBytes); + public InputStream run(FileContext fileContext) throws IOException { + return fileContext.open(new Path(path, blobName), repository.bufferSizeInBytes); } }); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException 
{ - SecurityUtils.execute(repository, new FcCallback() { + repository.execute(new HdfsRepository.Operation() { @Override - public Void doInHdfs(FileContext fc) throws IOException { + public Void run(FileContext fileContext) throws IOException { Path blob = new Path(path, blobName); // we pass CREATE, which means it fails if a blob already exists. // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING // that should be fixed there, no need to bring truncation into this, give the user an error. EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); CreateOpts[] opts = { CreateOpts.bufferSize(repository.bufferSizeInBytes) }; - try (FSDataOutputStream stream = fc.create(blob, flags, opts)) { + try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { int bytesRead; byte[] buffer = new byte[repository.bufferSizeInBytes]; while ((bytesRead = inputStream.read(buffer)) != -1) { @@ -130,10 +130,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = SecurityUtils.execute(repository, new FcCallback() { + FileStatus[] files = repository.execute(new HdfsRepository.Operation() { @Override - public FileStatus[] doInHdfs(FileContext fc) throws IOException { - return (fc.util().listStatus(path, new PathFilter() { + public FileStatus[] run(FileContext fileContext) throws IOException { + return (fileContext.util().listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { return path.getName().startsWith(blobNamePrefix); @@ -150,10 +150,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = SecurityUtils.execute(repository, new FcCallback() { + FileStatus[] files = repository.execute(new HdfsRepository.Operation() { @Override - public FileStatus[] doInHdfs(FileContext fc) throws IOException { - return fc.util().listStatus(path); + public FileStatus[] run(FileContext fileContext) throws IOException { + return fileContext.util().listStatus(path); } }); Map map = new LinkedHashMap(); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 4937cc13faf..1d182925f7e 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -45,9 +45,9 @@ final class HdfsBlobStore implements BlobStore { } private void mkdirs(Path path) throws IOException { - SecurityUtils.execute(repository, new FcCallback() { + repository.execute(new HdfsRepository.Operation() { @Override - public Void doInHdfs(FileContext fc) throws IOException { + public Void run(FileContext fc) throws IOException { fc.mkdir(path, null, true); return null; } @@ -66,9 +66,9 @@ final class HdfsBlobStore implements BlobStore { @Override public void delete(BlobPath path) throws IOException { - SecurityUtils.execute(repository, new FcCallback() { + repository.execute(new HdfsRepository.Operation() { @Override - public Void doInHdfs(FileContext fc) throws IOException { + public Void run(FileContext fc) throws IOException { fc.delete(translateToHdfsPath(path), true); return null; } diff --git 
a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java index 32d787442a9..e85f0c0c865 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -62,6 +62,7 @@ public final class HdfsPlugin extends Plugin { Class.forName("org.apache.hadoop.security.UserGroupInformation"); Class.forName("org.apache.hadoop.util.StringUtils"); Class.forName("org.apache.hadoop.util.ShutdownHookManager"); + Class.forName("org.apache.hadoop.conf.Configuration"); } catch (ClassNotFoundException | IOException e) { throw new RuntimeException(e); } finally { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 9fd7a097b57..3e25136e264 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -23,6 +23,7 @@ import java.lang.reflect.Constructor; import java.net.URI; import java.security.AccessController; import java.security.Principal; +import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.Collections; @@ -36,6 +37,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.UnsupportedFileSystemException; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; @@ -53,25 +56,20 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; public final class HdfsRepository extends BlobStoreRepository { - private final BlobPath basePath; + private final BlobPath basePath = BlobPath.cleanPath(); + private final RepositorySettings repositorySettings; private final ByteSizeValue chunkSize; private final boolean compress; final int bufferSizeInBytes; - private final RepositorySettings repositorySettings; - private final String path; - private final String uri; - private FileContext fc; + private HdfsBlobStore blobStore; + private volatile FileContext fileContext; @Inject public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); - this.repositorySettings = repositorySettings; - uri = repositorySettings.settings().get("uri"); - path = repositorySettings.settings().get("path"); - this.basePath = BlobPath.cleanPath(); this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null); this.compress = repositorySettings.settings().getAsBoolean("compress", false); this.bufferSizeInBytes = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); @@ -79,88 +77,57 @@ public final class HdfsRepository extends BlobStoreRepository { @Override protected void doStart() { - if (Strings.hasText(uri) == false) { + String pathSetting = 
repositorySettings.settings().get("path"); + // get configuration + if (pathSetting == null) { + throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); + } + + String uriSetting = repositorySettings.settings().get("uri"); + if (!Strings.hasText(uriSetting)) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } - URI actualUri = URI.create(uri); - String scheme = actualUri.getScheme(); - if (Strings.hasText(scheme) == false || scheme.toLowerCase(Locale.ROOT).equals("hdfs") == false) { + URI uri = URI.create(uriSetting); + if (!"hdfs".equalsIgnoreCase(uri.getScheme())) { throw new IllegalArgumentException( - String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", scheme, uri)); + String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting)); } - String p = actualUri.getPath(); - if (Strings.hasText(p) && p.equals("/") == false) { + if (Strings.hasLength(uri.getPath()) && uri.getPath().equals("/") == false) { throw new IllegalArgumentException(String.format(Locale.ROOT, - "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", p, uri)); - } - - // get configuration - if (path == null) { - throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); + "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting)); } try { - fc = getFileContext(); - Path hdfsPath = SecurityUtils.execute(fc, new FcCallback() { + // initialize our filecontext + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + fileContext = AccessController.doPrivileged(new PrivilegedAction() { @Override - public Path doInHdfs(FileContext fc) throws IOException { - return fc.makeQualified(new Path(path)); + public FileContext run() { + return createContext(uri, repositorySettings); } }); - logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath); + Path hdfsPath = execute(new Operation() { + @Override + public Path run(FileContext fileContext) throws IOException { + return fileContext.makeQualified(new Path(pathSetting)); + } + }); + logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), hdfsPath); blobStore = new HdfsBlobStore(this, hdfsPath); } catch (IOException e) { - throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", actualUri), e); + throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e); } super.doStart(); } - - // as the FileSystem is long-lived and might go away, make sure to check it before it's being used. 
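The lazy re-check machinery removed below is superseded by the Operation/execute pair added further down in this patch: every HDFS call is funneled through a single method that performs the SpecialPermission check and runs the callback under AccessController.doPrivileged. As a minimal sketch of the resulting calling convention — a hypothetical caller, where path is the container's base Path and only types this patch introduces are used:

    boolean exists = repository.execute(new HdfsRepository.Operation<Boolean>() {
        @Override
        public Boolean run(FileContext fileContext) throws IOException {
            // runs under doPrivileged; throws AlreadyClosedException once the repository is closed
            return fileContext.util().exists(new Path(path, "some-blob"));
        }
    });

Centralizing the permission check this way keeps unprivileged code such as scripts, which do not have SpecialPermission, from reaching HDFS directly.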
- FileContext getFileContext() throws IOException { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - try { - return AccessController.doPrivileged(new PrivilegedExceptionAction() { - @Override - public FileContext run() throws IOException { - return doGetFileContext(); - } - }); - } catch (PrivilegedActionException pae) { - throw (IOException) pae.getException(); - } - } - - private FileContext doGetFileContext() throws IOException { - // check if the fs is still alive - // make a cheap call that triggers little to no security checks - if (fc != null) { - try { - fc.util().exists(fc.getWorkingDirectory()); - } catch (IOException ex) { - if (ex.getMessage().contains("Filesystem closed")) { - fc = null; - } - else { - throw ex; - } - } - } - if (fc == null) { - return initFileContext(repositorySettings); - } - return fc; - } - + + // create hadoop filecontext @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)") - private FileContext initFileContext(RepositorySettings repositorySettings) throws IOException { - + private static FileContext createContext(URI uri, RepositorySettings repositorySettings) { Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true)); - cfg.setClassLoader(this.getClass().getClassLoader()); + cfg.setClassLoader(HdfsRepository.class.getClassLoader()); cfg.reloadConfiguration(); Map map = repositorySettings.settings().getByPrefix("conf.").getAsMap(); @@ -168,7 +135,7 @@ public final class HdfsRepository extends BlobStoreRepository { cfg.set(entry.getKey(), entry.getValue()); } - // create a hadoop user. if we want other auth, it must be done different anyway, and tested. + // create a hadoop user. if we want some auth, it must be done different anyway, and tested. Subject subject; try { Class clazz = Class.forName("org.apache.hadoop.security.User"); @@ -180,22 +147,21 @@ public final class HdfsRepository extends BlobStoreRepository { throw new RuntimeException(e); } - URI actualUri = URI.create(uri); // disable FS cache cfg.setBoolean("fs.hdfs.impl.disable.cache", true); - // create the AFS manually since through FileContext is relies on Subject.doAs for no reason at all - try { - return Subject.doAs(subject, new PrivilegedExceptionAction() { - @Override - public FileContext run() throws IOException { - AbstractFileSystem fs = AbstractFileSystem.get(actualUri, cfg); + // create the filecontext with our user + return Subject.doAs(subject, new PrivilegedAction() { + @Override + public FileContext run() { + try { + AbstractFileSystem fs = AbstractFileSystem.get(uri, cfg); return FileContext.getFileContext(fs, cfg); + } catch (UnsupportedFileSystemException e) { + throw new RuntimeException(e); } - }); - } catch (PrivilegedActionException e) { - throw (IOException) e.getException(); - } + } + }); } @Override @@ -221,9 +187,34 @@ public final class HdfsRepository extends BlobStoreRepository { @Override protected void doClose() throws ElasticsearchException { super.doClose(); - - // TODO: FileContext does not support any close - is there really no way - // to handle it? 
- fc = null; + fileContext = null; + } + + interface Operation { + V run(FileContext fileContext) throws IOException; + } + + /** + * Executes the provided operation against this repository + */ + V execute(Operation operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + // unprivileged code such as scripts do not have SpecialPermission + sm.checkPermission(new SpecialPermission()); + } + if (fileContext == null) { + throw new AlreadyClosedException("repository is closed: " + repositoryName); + } + try { + return AccessController.doPrivileged(new PrivilegedExceptionAction() { + @Override + public V run() throws IOException { + return operation.run(fileContext); + } + }); + } catch (PrivilegedActionException pae) { + throw (IOException) pae.getException(); + } } } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java deleted file mode 100644 index c68c5e1c43b..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.repositories.hdfs; - -import org.apache.hadoop.fs.FileContext; -import org.elasticsearch.SpecialPermission; - -import java.io.IOException; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; - -final class SecurityUtils { - - static V execute(HdfsRepository repository, FcCallback callback) throws IOException { - return execute(repository.getFileContext(), callback); - } - - static V execute(FileContext fc, FcCallback callback) throws IOException { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - try { - return AccessController.doPrivileged(new PrivilegedExceptionAction() { - @Override - public V run() throws IOException { - return callback.doInHdfs(fc); - } - }); - } catch (PrivilegedActionException pae) { - throw (IOException) pae.getException(); - } - } -} From a04268e42ea3dc890919c2151337cb508eab1f56 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 21 Dec 2015 23:52:16 -0500 Subject: [PATCH 219/322] reorder checks --- .../repositories/hdfs/HdfsRepository.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 3e25136e264..cec4e6b08a1 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -77,17 +77,10 @@ public final class HdfsRepository extends BlobStoreRepository { @Override protected void doStart() { - String pathSetting = repositorySettings.settings().get("path"); - // get configuration - if (pathSetting == null) { - throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); - } - String uriSetting = repositorySettings.settings().get("uri"); if (!Strings.hasText(uriSetting)) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } - URI uri = URI.create(uriSetting); if (!"hdfs".equalsIgnoreCase(uri.getScheme())) { throw new IllegalArgumentException( @@ -97,6 +90,13 @@ public final class HdfsRepository extends BlobStoreRepository { throw new IllegalArgumentException(String.format(Locale.ROOT, "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting)); } + + String pathSetting = repositorySettings.settings().get("path"); + // get configuration + if (pathSetting == null) { + throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); + } + try { // initialize our filecontext SecurityManager sm = System.getSecurityManager(); From 9573bb9f151303ffeffbc3bc94d1db35ffaa8809 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 22 Dec 2015 00:21:03 -0500 Subject: [PATCH 220/322] make sure BlobStore.close always triggers ACE on any access afterwards --- .../repositories/hdfs/HdfsBlobContainer.java | 30 ++++----- .../repositories/hdfs/HdfsBlobStore.java | 63 +++++++++++++++---- .../repositories/hdfs/HdfsRepository.java | 56 ++--------------- 3 files changed, 73 insertions(+), 76 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 7e9740aac7a..45eea7f00ef 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; +import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation; import java.io.FileNotFoundException; import java.io.IOException; @@ -40,20 +41,21 @@ import java.util.LinkedHashMap; import java.util.Map; final class HdfsBlobContainer extends AbstractBlobContainer { - - private final HdfsRepository repository; + private final HdfsBlobStore store; private final Path path; + private final int bufferSize; - HdfsBlobContainer(BlobPath blobPath, HdfsRepository repository, Path path) { + HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore store, Path path, int bufferSize) { super(blobPath); - this.repository = repository; + this.store = store; this.path = path; + this.bufferSize = bufferSize; } @Override public boolean blobExists(String blobName) { try { - return repository.execute(new HdfsRepository.Operation() { + return store.execute(new Operation() { @Override public Boolean run(FileContext fileContext) throws IOException { return fileContext.util().exists(new Path(path, blobName)); @@ -67,7 +69,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { try { - repository.execute(new HdfsRepository.Operation() { + store.execute(new Operation() { @Override public Boolean run(FileContext fileContext) throws IOException { return fileContext.delete(new Path(path, blobName), true); @@ -80,7 +82,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - repository.execute(new HdfsRepository.Operation() { + store.execute(new Operation() { @Override public Void run(FileContext fileContext) throws IOException { fileContext.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); @@ -92,17 +94,17 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return repository.execute(new HdfsRepository.Operation() { + return store.execute(new Operation() { @Override public InputStream run(FileContext fileContext) throws IOException { - return fileContext.open(new Path(path, blobName), repository.bufferSizeInBytes); + return fileContext.open(new Path(path, blobName), bufferSize); } }); } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - repository.execute(new HdfsRepository.Operation() { + store.execute(new Operation() { @Override public Void run(FileContext fileContext) throws IOException { Path blob = new Path(path, blobName); @@ -110,10 +112,10 @@ final class HdfsBlobContainer extends AbstractBlobContainer { // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING // that should be fixed there, no need to bring truncation into this, give the user an error. 
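To make the flag choice below concrete: CreateFlag.CREATE without OVERWRITE means the filesystem refuses to open a path that already exists, so a duplicate blob write surfaces as an error instead of silently truncating. A hedged sketch of that failure mode, where data is a stand-in byte array and the exception is Hadoop's org.apache.hadoop.fs.FileAlreadyExistsException:

    EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK);
    try (FSDataOutputStream out = fileContext.create(new Path(path, blobName), flags)) {
        out.write(data);
    } catch (FileAlreadyExistsException e) {
        // expected when blobName already exists; adding CreateFlag.OVERWRITE would truncate instead
    }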
EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); - CreateOpts[] opts = { CreateOpts.bufferSize(repository.bufferSizeInBytes) }; + CreateOpts[] opts = { CreateOpts.bufferSize(bufferSize) }; try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { int bytesRead; - byte[] buffer = new byte[repository.bufferSizeInBytes]; + byte[] buffer = new byte[bufferSize]; while ((bytesRead = inputStream.read(buffer)) != -1) { stream.write(buffer, 0, bytesRead); // For safety we also hsync each write as well, because of its docs: @@ -130,7 +132,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = repository.execute(new HdfsRepository.Operation() { + FileStatus[] files = store.execute(new Operation() { @Override public FileStatus[] run(FileContext fileContext) throws IOException { return (fileContext.util().listStatus(path, new PathFilter() { @@ -150,7 +152,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = repository.execute(new HdfsRepository.Operation() { + FileStatus[] files = store.execute(new Operation() { @Override public FileStatus[] run(FileContext fileContext) throws IOException { return fileContext.util().listStatus(path); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 1d182925f7e..a9d1f3e2d68 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -21,22 +21,34 @@ package org.elasticsearch.repositories.hdfs; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; final class HdfsBlobStore implements BlobStore { - private final HdfsRepository repository; private final Path root; + private final FileContext fileContext; + private final int bufferSize; + private volatile boolean closed; - HdfsBlobStore(HdfsRepository repository, Path root) throws IOException { - this.repository = repository; - this.root = root; - + HdfsBlobStore(FileContext fileContext, String path, int bufferSize) throws IOException { + this.fileContext = fileContext; + this.bufferSize = bufferSize; + this.root = execute(new Operation() { + @Override + public Path run(FileContext fileContext) throws IOException { + return fileContext.makeQualified(new Path(path)); + } + }); try { mkdirs(root); } catch (FileAlreadyExistsException ok) { @@ -45,10 +57,10 @@ final class HdfsBlobStore implements BlobStore { } private void mkdirs(Path path) throws IOException { - repository.execute(new HdfsRepository.Operation() { + execute(new Operation() { @Override - public Void run(FileContext fc) throws IOException { - fc.mkdir(path, 
null, true); + public Void run(FileContext fileContext) throws IOException { + fileContext.mkdir(path, null, true); return null; } }); @@ -61,12 +73,12 @@ final class HdfsBlobStore implements BlobStore { @Override public BlobContainer blobContainer(BlobPath path) { - return new HdfsBlobContainer(path, repository, buildHdfsPath(path)); + return new HdfsBlobContainer(path, this, buildHdfsPath(path), bufferSize); } @Override public void delete(BlobPath path) throws IOException { - repository.execute(new HdfsRepository.Operation() { + execute(new Operation() { @Override public Void run(FileContext fc) throws IOException { fc.delete(translateToHdfsPath(path), true); @@ -94,9 +106,38 @@ final class HdfsBlobStore implements BlobStore { } return path; } + + + interface Operation { + V run(FileContext fileContext) throws IOException; + } + + /** + * Executes the provided operation against this store + */ + V execute(Operation operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + // unprivileged code such as scripts do not have SpecialPermission + sm.checkPermission(new SpecialPermission()); + } + if (closed) { + throw new AlreadyClosedException("HdfsBlobStore is closed: " + root); + } + try { + return AccessController.doPrivileged(new PrivilegedExceptionAction() { + @Override + public V run() throws IOException { + return operation.run(fileContext); + } + }); + } catch (PrivilegedActionException pae) { + throw (IOException) pae.getException(); + } + } @Override public void close() { - // + closed = true; } } \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index cec4e6b08a1..e0fe49498d6 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -24,8 +24,6 @@ import java.net.URI; import java.security.AccessController; import java.security.Principal; import java.security.PrivilegedAction; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.Locale; import java.util.Map; @@ -36,10 +34,7 @@ import javax.security.auth.Subject; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.FileContext; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; @@ -60,10 +55,8 @@ public final class HdfsRepository extends BlobStoreRepository { private final RepositorySettings repositorySettings; private final ByteSizeValue chunkSize; private final boolean compress; - final int bufferSizeInBytes; private HdfsBlobStore blobStore; - private volatile FileContext fileContext; @Inject public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException { @@ -72,7 +65,6 @@ public final class HdfsRepository extends BlobStoreRepository { this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null); this.compress = 
repositorySettings.settings().getAsBoolean("compress", false); - this.bufferSizeInBytes = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); } @Override @@ -96,6 +88,8 @@ public final class HdfsRepository extends BlobStoreRepository { if (pathSetting == null) { throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); } + + int bufferSize = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); try { // initialize our filecontext @@ -103,20 +97,14 @@ public final class HdfsRepository extends BlobStoreRepository { if (sm != null) { sm.checkPermission(new SpecialPermission()); } - fileContext = AccessController.doPrivileged(new PrivilegedAction() { + FileContext fileContext = AccessController.doPrivileged(new PrivilegedAction() { @Override public FileContext run() { return createContext(uri, repositorySettings); } }); - Path hdfsPath = execute(new Operation() { - @Override - public Path run(FileContext fileContext) throws IOException { - return fileContext.makeQualified(new Path(pathSetting)); - } - }); - logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), hdfsPath); - blobStore = new HdfsBlobStore(this, hdfsPath); + blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize); + logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), pathSetting); } catch (IOException e) { throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e); } @@ -183,38 +171,4 @@ public final class HdfsRepository extends BlobStoreRepository { protected ByteSizeValue chunkSize() { return chunkSize; } - - @Override - protected void doClose() throws ElasticsearchException { - super.doClose(); - fileContext = null; - } - - interface Operation { - V run(FileContext fileContext) throws IOException; - } - - /** - * Executes the provided operation against this repository - */ - V execute(Operation operation) throws IOException { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - if (fileContext == null) { - throw new AlreadyClosedException("repository is closed: " + repositoryName); - } - try { - return AccessController.doPrivileged(new PrivilegedExceptionAction() { - @Override - public V run() throws IOException { - return operation.run(fileContext); - } - }); - } catch (PrivilegedActionException pae) { - throw (IOException) pae.getException(); - } - } } From 91dcc9e0735997587e1f954a24b537d24c1a4065 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 22 Dec 2015 00:28:53 -0500 Subject: [PATCH 221/322] tidy up --- .../repositories/hdfs/HdfsBlobContainer.java | 16 +++---------- .../repositories/hdfs/HdfsBlobStore.java | 23 +++++++++---------- .../repositories/hdfs/HdfsTests.java | 4 ---- 3 files changed, 14 insertions(+), 29 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 45eea7f00ef..135e2f77810 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -131,14 +131,14 @@ final class HdfsBlobContainer extends AbstractBlobContainer { } @Override - public Map listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { + public Map listBlobsByPrefix(final @Nullable String prefix) throws IOException { FileStatus[] files = store.execute(new Operation() { @Override public FileStatus[] run(FileContext fileContext) throws IOException { return (fileContext.util().listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { - return path.getName().startsWith(blobNamePrefix); + return prefix == null || path.getName().startsWith(prefix); } })); } @@ -152,16 +152,6 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public Map listBlobs() throws IOException { - FileStatus[] files = store.execute(new Operation() { - @Override - public FileStatus[] run(FileContext fileContext) throws IOException { - return fileContext.util().listStatus(path); - } - }); - Map map = new LinkedHashMap(); - for (FileStatus file : files) { - map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen())); - } - return Collections.unmodifiableMap(map); + return listBlobsByPrefix(null); } } \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index a9d1f3e2d68..0351ae28219 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -66,16 +66,6 @@ final class HdfsBlobStore implements BlobStore { }); } - @Override - public String toString() { - return root.toUri().toString(); - } - - @Override - public BlobContainer blobContainer(BlobPath path) { - return new HdfsBlobContainer(path, this, buildHdfsPath(path), bufferSize); - } - @Override public void delete(BlobPath path) throws IOException { execute(new Operation() { @@ -87,6 +77,16 @@ final class HdfsBlobStore implements BlobStore { }); } + @Override + public String toString() { + return root.toUri().toString(); + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + return new HdfsBlobContainer(path, this, buildHdfsPath(path), bufferSize); + } + private Path buildHdfsPath(BlobPath blobPath) { final Path path = translateToHdfsPath(blobPath); try { @@ -107,7 +107,6 @@ final class HdfsBlobStore implements BlobStore { return path; } - interface Operation { V run(FileContext fileContext) throws IOException; } @@ -122,7 +121,7 @@ final class HdfsBlobStore implements BlobStore { sm.checkPermission(new SpecialPermission()); } if (closed) { - throw new AlreadyClosedException("HdfsBlobStore is closed: " + root); + throw new AlreadyClosedException("HdfsBlobStore is closed: " + this); } try { return AccessController.doPrivileged(new PrivilegedExceptionAction() { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index a79a753fb57..0e838d17fd3 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -22,7 +22,6 @@ import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import java.util.Collection; -import java.util.Collections; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -34,9 +33,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.hdfs.HdfsPlugin; import org.elasticsearch.snapshots.SnapshotState; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.ESSingleNodeTestCase; public class HdfsTests extends ESSingleNodeTestCase { From 3a5d3a3bb0d2b3b4aee2ea5ba0839f315653379f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 22 Dec 2015 09:39:12 +0100 Subject: [PATCH 222/322] Remove deprecated query cache settings --- .../elasticsearch/cluster/ClusterModule.java | 1 - .../cache/request/IndicesRequestCache.java | 31 ++----------------- docs/reference/migration/migrate_3_0.asciidoc | 5 +++ 3 files changed, 7 insertions(+), 30 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 42517854f08..59b33bb03d1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -186,7 +186,6 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY); registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); - registerIndexDynamicSetting(IndicesRequestCache.DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); registerIndexDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME); registerIndexDynamicSetting(DefaultSearchContext.MAX_RESULT_WINDOW, Validator.POSITIVE_INTEGER); } diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java index d783fcd96e3..a00cc7e787c 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java @@ -79,13 +79,9 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis * since we are checking on the cluster state IndexMetaData always. 
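After this cleanup only the new setting keys remain. As a minimal sketch — hypothetical index name, standard update-settings call — enabling the request cache now reads:

    client.admin().indices().prepareUpdateSettings("my-index")
        .setSettings(Settings.builder()
            .put("index.requests.cache.enable", true) // replaces the removed index.cache.query.enable
            .build())
        .get();

The node-level `indices.requests.cache.size` replaces `indices.cache.query.size` in the same way, as the migration note below records.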
*/ public static final String INDEX_CACHE_REQUEST_ENABLED = "index.requests.cache.enable"; - @Deprecated - public static final String DEPRECATED_INDEX_CACHE_REQUEST_ENABLED = "index.cache.query.enable"; public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval"; public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size"; - @Deprecated - public static final String DEPRECATED_INDICES_CACHE_QUERY_SIZE = "indices.cache.query.size"; public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire"; private static final Set CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH); @@ -113,19 +109,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis this.threadPool = threadPool; this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60)); - String size = settings.get(INDICES_CACHE_QUERY_SIZE); - if (size == null) { - size = settings.get(DEPRECATED_INDICES_CACHE_QUERY_SIZE); - if (size != null) { - deprecationLogger.deprecated("The [" + DEPRECATED_INDICES_CACHE_QUERY_SIZE - + "] settings is now deprecated, use [" + INDICES_CACHE_QUERY_SIZE + "] instead"); - } - } - if (size == null) { - // this cache can be very small yet still be very effective - size = "1%"; - } - this.size = size; + this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%"); this.expire = settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null); buildCache(); @@ -135,18 +119,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis } private boolean isCacheEnabled(Settings settings, boolean defaultEnable) { - Boolean enable = settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, null); - if (enable == null) { - enable = settings.getAsBoolean(DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, null); - if (enable != null) { - deprecationLogger.deprecated("The [" + DEPRECATED_INDEX_CACHE_REQUEST_ENABLED - + "] settings is now deprecated, use [" + INDEX_CACHE_REQUEST_ENABLED + "] instead"); - } - } - if (enable == null) { - enable = defaultEnable; - } - return enable; + return settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, defaultEnable); } private void buildCache() { diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 8b6775b082c..d53740c198b 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -206,6 +206,11 @@ The `index.translog.flush_threshold_ops` setting is not supported anymore. In or growth use `index.translog.flush_threshold_size` instead. Changing the translog type with `index.translog.fs.type` is not supported anymore, the `buffered` implementation is now the only available option and uses a fixed `8kb` buffer. +==== Request Cache Settings + +The deprecated settings `index.cache.query.enable` and `indices.cache.query.size` have been removed and are replaced with +`index.requests.cache.enable` and `indices.requests.cache.size` respectively. + [[breaking_30_mapping_changes]] === Mapping changes From 8135a4ac9f8b938332ab43b0726267072e521012 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 22 Dec 2015 09:56:26 +0100 Subject: [PATCH 223/322] Remove `index.compound_on_flush` setting and default to `true` We added this undocumented realtime setting as backup plan long ago but to date we haven't had a situation where it was a problem. 
Always writing compound files reduces the number of open file handles in the
NRT case dramatically, so the behaviour should always be enabled.
---
 .../elasticsearch/cluster/ClusterModule.java  |  1 -
 .../index/engine/EngineConfig.java            | 21 -----
 .../index/engine/InternalEngine.java          |  3 +-
 .../elasticsearch/index/shard/IndexShard.java |  6 --
 .../index/engine/InternalEngineIT.java        | 88 -------------------
 .../engine/InternalEngineSettingsTests.java   |  8 +-
 .../index/engine/InternalEngineTests.java     | 22 ++---
 .../index/engine/ShadowEngineTests.java       | 30 +++----
 8 files changed, 21 insertions(+), 158 deletions(-)
 delete mode 100644 core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java

diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 42517854f08..9ed87457857 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -152,7 +152,6 @@ public class ClusterModule extends AbstractModule {
        registerIndexDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE, Validator.EMPTY);
        registerIndexDynamicSetting(IndexShard.INDEX_REFRESH_INTERVAL, Validator.TIME);
        registerIndexDynamicSetting(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, Validator.EMPTY);
-        registerIndexDynamicSetting(EngineConfig.INDEX_COMPOUND_ON_FLUSH, Validator.BOOLEAN);
        registerIndexDynamicSetting(EngineConfig.INDEX_GC_DELETES_SETTING, Validator.TIME);
        registerIndexDynamicSetting(IndexShard.INDEX_FLUSH_ON_CLOSE, Validator.BOOLEAN);
        registerIndexDynamicSetting(EngineConfig.INDEX_VERSION_MAP_SIZE, Validator.BYTES_SIZE_OR_PERCENTAGE);
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
index c5da8e83b3d..b44265f37ad 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
@@ -53,7 +53,6 @@ public final class EngineConfig {
    private volatile ByteSizeValue indexingBufferSize;
    private volatile ByteSizeValue versionMapSize;
    private volatile String versionMapSizeSetting;
-    private volatile boolean compoundOnFlush = true;
    private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis();
    private volatile boolean enableGcDeletes = true;
    private final TimeValue flushMergesAfter;
@@ -73,11 +72,6 @@ public final class EngineConfig {
    private final QueryCache queryCache;
    private final QueryCachingPolicy queryCachingPolicy;

-    /**
-     * Index setting for compound file on flush. This setting is realtime updateable.
-     */
-    public static final String INDEX_COMPOUND_ON_FLUSH = "index.compound_on_flush";
-
    /**
     * Index setting to enable / disable deletes garbage collection.
     * This setting is realtime updateable
     */
@@ -132,7 +126,6 @@ public final class EngineConfig {
        this.similarity = similarity;
        this.codecService = codecService;
        this.eventListener = eventListener;
-        this.compoundOnFlush = settings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush);
        codecName = settings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME);
        // We start up inactive and rely on IndexingMemoryController to give us our fair share once we start indexing:
        indexingBufferSize = IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER;
@@ -208,13 +201,6 @@ public final class EngineConfig {
        return indexingBufferSize;
    }

-    /**
-     * Returns true iff flushed segments should be written as compound file system.
Defaults to true - */ - public boolean isCompoundOnFlush() { - return compoundOnFlush; - } - /** * Returns the GC deletes cycle in milliseconds. */ @@ -346,13 +332,6 @@ public final class EngineConfig { this.gcDeletesInMillis = gcDeletesInMillis; } - /** - * Sets if flushed segments should be written as compound file system. Defaults to true - */ - public void setCompoundOnFlush(boolean compoundOnFlush) { - this.compoundOnFlush = compoundOnFlush; - } - /** * Returns the {@link org.elasticsearch.index.shard.TranslogRecoveryPerformer} for this engine. This class is used * to apply transaction log operations to the engine. It encapsulates all the logic to transfer the translog entry into diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 7911d219684..be2b73ea6c7 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -313,7 +313,6 @@ public class InternalEngine extends Engine { try { final LiveIndexWriterConfig iwc = indexWriter.getConfig(); iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac()); - iwc.setUseCompoundFile(engineConfig.isCompoundOnFlush()); } catch (AlreadyClosedException ex) { // ignore } @@ -939,7 +938,7 @@ public class InternalEngine extends Engine { * here but with 1s poll this is only executed twice at most * in combination with the default writelock timeout*/ iwc.setWriteLockTimeout(5000); - iwc.setUseCompoundFile(this.engineConfig.isCompoundOnFlush()); + iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh // Warm-up hook for newly-merged segments. Warming up segments here is better since it will be performed at the end // of the merge operation and won't slow down _refresh iwc.setMergedSegmentWarmer(new IndexReaderWarmer() { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 4545787231a..84752af1e68 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1179,12 +1179,6 @@ public class IndexShard extends AbstractIndexShardComponent { change = true; } - final boolean compoundOnFlush = settings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, config.isCompoundOnFlush()); - if (compoundOnFlush != config.isCompoundOnFlush()) { - logger.info("updating {} from [{}] to [{}]", EngineConfig.INDEX_COMPOUND_ON_FLUSH, config.isCompoundOnFlush(), compoundOnFlush); - config.setCompoundOnFlush(compoundOnFlush); - change = true; - } final String versionMapSize = settings.get(EngineConfig.INDEX_VERSION_MAP_SIZE, config.getVersionMapSizeSetting()); if (config.getVersionMapSizeSetting().equals(versionMapSize) == false) { config.setVersionMapSizeSetting(versionMapSize); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java deleted file mode 100644 index 76c07edcb0d..00000000000 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.engine; - -import org.elasticsearch.action.admin.indices.segments.IndexSegments; -import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; -import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; -import org.elasticsearch.action.admin.indices.segments.ShardSegments; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -public class InternalEngineIT extends ESIntegTestCase { - public void testSetIndexCompoundOnFlush() { - client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("number_of_replicas", 0).put("number_of_shards", 1)).get(); - ensureGreen(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(1, 1, "test"); - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false)).get(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(1, 2, "test"); - - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true)).get(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(2, 3, "test"); - } - - private void assertTotalCompoundSegments(int i, int t, String index) { - IndicesSegmentResponse indicesSegmentResponse = client().admin().indices().prepareSegments(index).get(); - assertNotNull("indices segments response should contain indices", indicesSegmentResponse.getIndices()); - IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(index); - assertNotNull(indexSegments); - assertNotNull(indexSegments.getShards()); - Collection values = indexSegments.getShards().values(); - int compounds = 0; - int total = 0; - for (IndexShardSegments indexShardSegments : values) { - for (ShardSegments s : indexShardSegments) { - for (Segment segment : s) { - if (segment.isSearch() && segment.getNumDocs() > 0) { - if (segment.isCompound()) { - compounds++; - } - total++; - } - } - } - } - assertThat(compounds, Matchers.equalTo(i)); - assertThat(total, Matchers.equalTo(t)); - } - - private Set segments(IndexSegments segments) { - Set segmentSet = new HashSet<>(); - for (IndexShardSegments s : segments) { - for (ShardSegments shardSegments : s) { - segmentSet.addAll(shardSegments.getSegments()); - } - } - return segmentSet; - } -} diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index 1ed022dbefa..c0b4b6c84ce 100644 --- 
a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -36,10 +36,6 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { // INDEX_COMPOUND_ON_FLUSH InternalEngine engine = ((InternalEngine) EngineAccess.engine(service.getShardOrNull(0))); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); - client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build()).get(); - assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(false)); - client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build()).get(); - assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); // VERSION MAP SIZE @@ -61,7 +57,6 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { String versionMapString = versionMapAsPercent ? versionMapPercent + "%" : versionMapSizeInMB + "mb"; Settings build = Settings.builder() - .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush) .put(EngineConfig.INDEX_GC_DELETES_SETTING, gcDeletes, TimeUnit.MILLISECONDS) .put(EngineConfig.INDEX_VERSION_MAP_SIZE, versionMapString) .build(); @@ -69,8 +64,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { client().admin().indices().prepareUpdateSettings("foo").setSettings(build).get(); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); - assertEquals(engine.config().isCompoundOnFlush(), compoundOnFlush); - assertEquals(currentIndexWriterConfig.getUseCompoundFile(), compoundOnFlush); + assertEquals(currentIndexWriterConfig.getUseCompoundFile(), true); assertEquals(engine.config().getGcDeletesInMillis(), gcDeletes); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index c8960883b19..13fce88e2c1 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -169,7 +169,6 @@ public class InternalEngineTests extends ESTestCase { codecName = "default"; } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() - .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, randomBoolean()) .put(EngineConfig.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us .put(EngineConfig.INDEX_CODEC_SETTING, codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -300,7 +299,6 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.isEmpty(), equalTo(true)); assertThat(engine.segmentsStats().getCount(), equalTo(0l)); assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0l)); - final boolean defaultCompound = defaultSettings.getSettings().getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true); // create a doc and refresh ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); @@ -323,7 +321,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), 
equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(0).ramTree, nullValue()); engine.flush(); @@ -335,10 +333,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); - - engine.config().setCompoundOnFlush(false); - engine.onSettingsChanged(); + assertThat(segments.get(0).isCompound(), equalTo(true)); ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("3"), doc3)); @@ -357,14 +352,14 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(false)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); engine.delete(new Engine.Delete("test", "1", newUid("1"))); @@ -378,15 +373,14 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(1)); assertThat(segments.get(0).getDeletedDocs(), equalTo(1)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(false)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); - engine.config().setCompoundOnFlush(true); engine.onSettingsChanged(); ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("4"), doc4)); @@ -400,13 +394,13 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(1)); assertThat(segments.get(0).getDeletedDocs(), equalTo(1)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(false)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); assertThat(segments.get(2).isCommitted(), equalTo(false)); assertThat(segments.get(2).isSearch(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 339dac2e58a..214bc343a8a 100644 --- 
a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -118,7 +118,6 @@ public class ShadowEngineTests extends ESTestCase { codecName = "default"; } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() - .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, randomBoolean()) .put(EngineConfig.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us .put(EngineConfig.INDEX_CODEC_SETTING, codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -280,7 +279,6 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.isEmpty(), equalTo(true)); assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l)); assertThat(primaryEngine.segmentsStats().getMemoryInBytes(), equalTo(0l)); - final boolean defaultCompound = defaultSettings.getSettings().getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true); // create a doc and refresh ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); @@ -303,7 +301,7 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertTrue(segments.get(0).isCompound()); assertThat(segments.get(0).ramTree, nullValue()); // Check that the replica sees nothing @@ -331,7 +329,7 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); segments = replicaEngine.segments(false); assertThat(segments.size(), equalTo(1)); @@ -340,12 +338,9 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); - primaryEngine.config().setCompoundOnFlush(false); - primaryEngine.onSettingsChanged(); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("3"), doc3)); primaryEngine.refresh("test"); @@ -363,12 +358,12 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(false)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); // Make visible to shadow replica primaryEngine.flush(); @@ -387,12 +382,12 @@ public class ShadowEngineTests extends ESTestCase { 
assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(true)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); primaryEngine.delete(new Engine.Delete("test", "1", newUid("1"))); primaryEngine.refresh("test"); @@ -405,20 +400,17 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(1)); assertThat(segments.get(0).getDeletedDocs(), equalTo(1)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(true)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); // Make visible to shadow replica primaryEngine.flush(); replicaEngine.refresh("test"); - primaryEngine.config().setCompoundOnFlush(true); - primaryEngine.onSettingsChanged(); - ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("4"), doc4)); primaryEngine.refresh("test"); @@ -431,13 +423,13 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(1)); assertThat(segments.get(0).getDeletedDocs(), equalTo(1)); - assertThat(segments.get(0).isCompound(), equalTo(defaultCompound)); + assertThat(segments.get(0).isCompound(), equalTo(true)); assertThat(segments.get(1).isCommitted(), equalTo(true)); assertThat(segments.get(1).isSearch(), equalTo(true)); assertThat(segments.get(1).getNumDocs(), equalTo(1)); assertThat(segments.get(1).getDeletedDocs(), equalTo(0)); - assertThat(segments.get(1).isCompound(), equalTo(false)); + assertThat(segments.get(1).isCompound(), equalTo(true)); assertThat(segments.get(2).isCommitted(), equalTo(false)); assertThat(segments.get(2).isSearch(), equalTo(true)); From 70d38760e6bb54936e9abc4d382746f5c175bd4b Mon Sep 17 00:00:00 2001 From: kwangsik Date: Tue, 22 Dec 2015 18:27:50 +0900 Subject: [PATCH 224/322] Put space into last comment line --- core/src/main/java/org/elasticsearch/action/ActionRequest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequest.java b/core/src/main/java/org/elasticsearch/action/ActionRequest.java index 24cf68025ba..45e7e76ca0b 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ActionRequest.java @@ -38,7 +38,7 @@ public abstract class ActionRequest extends TransportRe super(request); // this does not set the listenerThreaded API, if needed, its up to the caller to set it // since most times, we actually want it to not be threaded... 
-        //this.listenerThreaded = request.listenerThreaded();
+        // this.listenerThreaded = request.listenerThreaded();
    }

    public abstract ActionRequestValidationException validate();

From f5e4cd46164630e09f308ed78c512eea8bda8a05 Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Wed, 9 Dec 2015 14:14:18 +0100
Subject: [PATCH 225/322] Remove recovery threadpools and throttle outgoing
 recoveries on the master

Today we throttle recoveries only on the incoming side. Nodes that hold a lot
of primaries can still get overloaded by too many outgoing recoveries; so far we
have only kept that at bay by limiting the number of threads that send files to
the target. The right solution is to also throttle the outgoing recoveries, which
are today unbounded on the master, and to not start a recovery until we have
enough resources on both the source and the target node.

The concurrency aspects of the recovery source also added a lot of complexity and
additional threadpools that are hard to configure. This commit removes the
concurrent streams notion completely and sends files in the thread that drives
the recovery, simplifying the recovery code considerably.
Outgoing recoveries are now throttled on the master via an allocation decider.
---
 .../elasticsearch/cluster/ClusterModule.java  |   2 -
 .../routing/IndexShardRoutingTable.java       |   1 -
 .../cluster/routing/RoutingNodes.java         | 172 +++++++++++++++++-
 .../routing/allocation/AllocationService.java |  47 ++---
 .../decider/ThrottlingAllocationDecider.java  |  51 +++---
 .../common/settings/ClusterSettings.java      |   4 +-
 .../indices/recovery/RecoverySettings.java    |  56 +-----
 .../recovery/RecoverySourceHandler.java       | 113 +++---------
 .../java/org/elasticsearch/node/Node.java     |   1 -
 .../OldIndexBackwardsCompatibilityIT.java     |   4 +-
 .../ack/AckClusterUpdateSettingsIT.java       |   3 +-
 .../cluster/allocation/ClusterRerouteIT.java  |   3 +-
 .../routing/RandomShardRoutingMutator.java    |   9 +-
 .../cluster/routing/RoutingTableTests.java    |   2 +-
 .../allocation/AllocationCommandsTests.java   |   2 +-
 .../allocation/AllocationPriorityTests.java   |   7 +-
 .../allocation/AwarenessAllocationTests.java  |  12 +-
 .../allocation/BalanceConfigurationTests.java |   5 +-
 .../ConcurrentRebalanceRoutingTests.java      |   4 +-
 .../allocation/DeadNodesAllocationTests.java  |   6 +-
 ...ReplicaAsPrimaryDuringRelocationTests.java |   2 +-
 .../allocation/FailedShardsRoutingTests.java  |  12 +-
 .../NodeVersionAllocationDeciderTests.java    |   6 +-
 .../PreferPrimaryAllocationTests.java         |   2 +
 .../PrimaryElectionRoutingTests.java          |   4 +-
 ...yNotRelocatedWhileBeingRecoveredTests.java |   1 +
 .../RandomAllocationDeciderTests.java         |   3 +-
 .../allocation/RebalanceAfterActiveTests.java |   2 +-
 .../ReplicaAllocatedAfterPrimaryTests.java    |   2 +-
 .../RoutingNodesIntegrityTests.java           |   2 +
 .../ShardsLimitAllocationTests.java           |   8 +-
 .../SingleShardNoReplicasRoutingTests.java    |  10 +-
 .../SingleShardOneReplicaRoutingTests.java    |   2 +-
 .../allocation/StartedShardsRoutingTests.java |   6 +-
 .../allocation/ThrottlingAllocationTests.java | 161 +++++++++++++++-
 .../UpdateNumberOfReplicasTests.java          |   2 +-
 .../decider/DiskThresholdDeciderTests.java    |  25 +--
 .../decider/EnableAllocationTests.java        |   1 +
 .../structure/RoutingIteratorTests.java       |  10 +-
 .../gateway/GatewayMetaStateTests.java        |   4 +-
 .../gateway/RecoveryFromGatewayIT.java        |   5 +-
 .../index/store/CorruptedFileIT.java          |   5 +-
 .../recovery/RecoverySourceHandlerTests.java  |   6 +-
 .../recovery/RecoverySettingsTests.java       |  12 --
 docs/reference/migration/migrate_3_0.asciidoc |  13 ++
 .../cluster/shards_allocation.asciidoc        |
22 +-- .../modules/indices/recovery.asciidoc | 6 - .../test/InternalTestCluster.java | 10 +- 48 files changed, 531 insertions(+), 317 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 25e86d9887f..c0370248ad7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -74,7 +74,6 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.ttl.IndicesTTLService; @@ -129,7 +128,6 @@ public class ClusterModule extends AbstractModule { registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class); } - private void registerBuiltinIndexSettings() { registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 6512ee5cef7..d425b63b34c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -106,7 +106,6 @@ public class IndexShardRoutingTable implements Iterable { } } this.allShardsStarted = allShardsStarted; - this.primary = primary; if (primary != null) { this.primaryAsList = Collections.singletonList(primary); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 8dd980c8bb3..e98eb9d4a47 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -69,6 +69,7 @@ public class RoutingNodes implements Iterable { private int relocatingShards = 0; private final Map> nodesPerAttributeNames = new HashMap<>(); + private final Map recoveryiesPerNode = new HashMap<>(); public RoutingNodes(ClusterState clusterState) { this(clusterState, true); @@ -91,6 +92,7 @@ public class RoutingNodes implements Iterable { // also fill replicaSet information for (ObjectCursor indexRoutingTable : routingTable.indicesRouting().values()) { for (IndexShardRoutingTable indexShard : indexRoutingTable.value) { + assert indexShard.primary != null; for (ShardRouting shard : indexShard) { // to get all the shards belonging to an index, including the replicas, // we define a replica set and keep track of it. A replica set is identified @@ -107,16 +109,18 @@ public class RoutingNodes implements Iterable { // add the counterpart shard with relocatingNodeId reflecting the source from which // it's relocating from. 
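                        // note: the relocation target counts as an incoming recovery on its own node and as an
                        // outgoing recovery on the relocation source; both counters are tracked in recoveryiesPerNode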
ShardRouting targetShardRouting = shard.buildTargetRelocatingShard(); + addInitialRecovery(targetShardRouting); if (readOnly) { targetShardRouting.freeze(); } entries.add(targetShardRouting); assignedShardsAdd(targetShardRouting); - } else if (!shard.active()) { // shards that are initializing without being relocated + } else if (shard.active() == false) { // shards that are initializing without being relocated if (shard.primary()) { inactivePrimaryCount++; } inactiveShardCount++; + addInitialRecovery(shard); } } else { final ShardRouting sr = getRouting(shard, readOnly); @@ -132,6 +136,79 @@ public class RoutingNodes implements Iterable { } } + private void addRecovery(ShardRouting routing) { + addRecovery(routing, true, false); + } + + private void removeRecovery(ShardRouting routing) { + addRecovery(routing, false, false); + } + + public void addInitialRecovery(ShardRouting routing) { + addRecovery(routing,true, true); + } + + private void addRecovery(final ShardRouting routing, final boolean increment, final boolean initializing) { + final int howMany = increment ? 1 : -1; + assert routing.initializing() : "routing must be initializing: " + routing; + Recoveries.getOrAdd(recoveryiesPerNode, routing.currentNodeId()).addIncoming(howMany); + final String sourceNodeId; + if (routing.relocatingNodeId() != null) { // this is a relocation-target + sourceNodeId = routing.relocatingNodeId(); + if (routing.primary() && increment == false) { // primary is done relocating + int numRecoveringReplicas = 0; + for (ShardRouting assigned : assignedShards(routing)) { + if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) { + numRecoveringReplicas++; + } + } + // we transfer the recoveries to the relocated primary + recoveryiesPerNode.get(sourceNodeId).addOutgoing(-numRecoveringReplicas); + recoveryiesPerNode.get(routing.currentNodeId()).addOutgoing(numRecoveringReplicas); + } + } else if (routing.primary() == false) { // primary without relocationID is initial recovery + ShardRouting primary = findPrimary(routing); + if (primary == null && initializing) { + primary = routingTable.index(routing.index()).shard(routing.shardId().id()).primary; + } else if (primary == null) { + throw new IllegalStateException("replica is initializing but primary is unassigned"); + } + sourceNodeId = primary.currentNodeId(); + } else { + sourceNodeId = null; + } + if (sourceNodeId != null) { + Recoveries.getOrAdd(recoveryiesPerNode, sourceNodeId).addOutgoing(howMany); + } + } + + public int getIncomingRecoveries(String nodeId) { + return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getIncoming(); + } + + public int getOutgoingRecoveries(String nodeId) { + return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getOutgoing(); + } + + private ShardRouting findPrimary(ShardRouting routing) { + List shardRoutings = assignedShards.get(routing.shardId()); + ShardRouting primary = null; + if (shardRoutings != null) { + for (ShardRouting shardRouting : shardRoutings) { + if (shardRouting.primary()) { + if (shardRouting.active()) { + return shardRouting; + } else if (primary == null) { + primary = shardRouting; + } else if (primary.relocatingNodeId() != null) { + primary = shardRouting; + } + } + } + } + return primary; + } + private static ShardRouting getRouting(ShardRouting src, boolean readOnly) { if (readOnly) { src.freeze(); // we just freeze and reuse this instance if we are read only @@ -352,6 +429,7 @@ public class RoutingNodes implements Iterable { if 
(shard.primary()) { inactivePrimaryCount++; } + addRecovery(shard); assignedShardsAdd(shard); } @@ -367,6 +445,7 @@ public class RoutingNodes implements Iterable { ShardRouting target = shard.buildTargetRelocatingShard(); node(target.currentNodeId()).add(target); assignedShardsAdd(target); + addRecovery(target); return target; } @@ -383,9 +462,12 @@ public class RoutingNodes implements Iterable { inactivePrimaryCount--; } } + removeRecovery(shard); shard.moveToStarted(); } + + /** * Cancels a relocation of a shard that shard must relocating. */ @@ -440,6 +522,9 @@ public class RoutingNodes implements Iterable { cancelRelocation(shard); } assignedShardsRemove(shard); + if (shard.initializing()) { + removeRecovery(shard); + } } private void assignedShardsAdd(ShardRouting shard) { @@ -749,6 +834,54 @@ public class RoutingNodes implements Iterable { } } + for (Map.Entry recoveries : routingNodes.recoveryiesPerNode.entrySet()) { + String node = recoveries.getKey(); + final Recoveries value = recoveries.getValue(); + int incoming = 0; + int outgoing = 0; + RoutingNode routingNode = routingNodes.nodesToShards.get(node); + if (routingNode != null) { // node might have dropped out of the cluster + for (ShardRouting routing : routingNode) { + if (routing.initializing()) { + incoming++; + } else if (routing.relocating()) { + outgoing++; + } + if (routing.primary() && (routing.initializing() && routing.relocatingNodeId() != null) == false) { // we don't count the initialization end of the primary relocation + List shardRoutings = routingNodes.assignedShards.get(routing.shardId()); + for (ShardRouting assigned : shardRoutings) { + if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) { + outgoing++; + } + } + } + } + } +// if (outgoing != value.outgoing) { +// incoming = 0; +// outgoing = 0; +// for (ShardRouting routing : routingNode) { +// if (routing.initializing()) { +// incoming++; +// } else if (routing.relocating()) { +// outgoing++; +// } +// if (routing.primary() && (routing.initializing() && routing.relocatingNodeId() != null) == false) { // we don't count the initialization end of the primary relocation +// List shardRoutings = routingNodes.assignedShards.get(routing.shardId()); +// for (ShardRouting assigned : shardRoutings) { +// if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) { +// outgoing++; +// } +// } +// } +// } +// } + assert incoming == value.incoming : incoming + " != " + value.incoming; + assert outgoing == value.outgoing : outgoing + " != " + value.outgoing + " node: " + routingNode; + + } + + assert unassignedPrimaryCount == routingNodes.unassignedShards.getNumPrimaries() : "Unassigned primaries is [" + unassignedPrimaryCount + "] but RoutingNodes returned unassigned primaries [" + routingNodes.unassigned().getNumPrimaries() + "]"; assert unassignedIgnoredPrimaryCount == routingNodes.unassignedShards.getNumIgnoredPrimaries() : @@ -856,4 +989,41 @@ public class RoutingNodes implements Iterable { throw new IllegalStateException("can't modify RoutingNodes - readonly"); } } + + private static final class Recoveries { + private static final Recoveries EMPTY = new Recoveries(); + private int incoming = 0; + private int outgoing = 0; + + int getTotal() { + return incoming + outgoing; + } + + void addOutgoing(int howMany) { + assert outgoing + howMany >= 0 : outgoing + howMany+ " must be >= 0"; + outgoing += howMany; + } + + void addIncoming(int howMany) { + assert incoming + howMany >= 0 : 
incoming + howMany+ " must be >= 0"; + incoming += howMany; + } + + int getOutgoing() { + return outgoing; + } + + int getIncoming() { + return incoming; + } + + public static Recoveries getOrAdd(Map map, String key) { + Recoveries recoveries = map.get(key); + if (recoveries == null) { + recoveries = new Recoveries(); + map.put(key, recoveries); + } + return recoveries; + } + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index feafb76a5f2..774bcb078f5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -364,35 +365,17 @@ public class AllocationService extends AbstractComponent { private boolean electPrimariesAndUnassignedDanglingReplicas(RoutingAllocation allocation) { boolean changed = false; - RoutingNodes routingNodes = allocation.routingNodes(); + final RoutingNodes routingNodes = allocation.routingNodes(); if (routingNodes.unassigned().getNumPrimaries() == 0) { // move out if we don't have unassigned primaries return changed; } - - // go over and remove dangling replicas that are initializing for primary shards - List shardsToFail = new ArrayList<>(); - for (ShardRouting shardEntry : routingNodes.unassigned()) { - if (shardEntry.primary()) { - for (ShardRouting routing : routingNodes.assignedShards(shardEntry)) { - if (!routing.primary() && routing.initializing()) { - shardsToFail.add(routing); - } - } - - } - } - for (ShardRouting shardToFail : shardsToFail) { - changed |= applyFailedShard(allocation, shardToFail, false, - new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); - } - // now, go over and elect a new primary if possible, not, from this code block on, if one is elected, // routingNodes.hasUnassignedPrimaries() will potentially be false - for (ShardRouting shardEntry : routingNodes.unassigned()) { if (shardEntry.primary()) { + // remove dangling replicas that are initializing for primary shards + changed |= failReplicasForUnassignedPrimary(allocation, shardEntry); ShardRouting candidate = allocation.routingNodes().activeReplica(shardEntry); if (candidate != null) { IndexMetaData index = allocation.metaData().index(candidate.index()); @@ -457,6 +440,22 @@ public class AllocationService extends AbstractComponent { return changed; } + private boolean failReplicasForUnassignedPrimary(RoutingAllocation allocation, ShardRouting primary) { + List replicas = new ArrayList<>(); + for (ShardRouting routing : allocation.routingNodes().assignedShards(primary)) { + if (!routing.primary() && routing.initializing()) { + replicas.add(routing); + } + } + boolean changed = false; + for (ShardRouting routing : replicas) { + changed |= applyFailedShard(allocation, routing, false, + new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", + null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + } + return 
changed; + } + private boolean applyStartedShards(RoutingNodes routingNodes, Iterable startedShardEntries) { boolean dirty = false; // apply shards might be called several times with the same shard, ignore it @@ -523,7 +522,6 @@ public class AllocationService extends AbstractComponent { logger.debug("{} ignoring shard failure, unknown index in {} ({})", failedShard.shardId(), failedShard, unassignedInfo.shortSummary()); return false; } - RoutingNodes routingNodes = allocation.routingNodes(); RoutingNodes.RoutingNodeIterator matchedNode = routingNodes.routingNodeIter(failedShard.currentNodeId()); @@ -546,7 +544,10 @@ public class AllocationService extends AbstractComponent { logger.debug("{} ignoring shard failure, unknown allocation id in {} ({})", failedShard.shardId(), failedShard, unassignedInfo.shortSummary()); return false; } - + if (failedShard.primary()) { + // fail replicas first otherwise we move RoutingNodes into an inconsistent state + failReplicasForUnassignedPrimary(allocation, failedShard); + } // replace incoming instance to make sure we work on the latest one. Copy it to maintain information during modifications. failedShard = new ShardRouting(matchedNode.current()); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index bbd28104baf..25f43f57610 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -50,26 +50,36 @@ public class ThrottlingAllocationDecider extends AllocationDecider { public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = 2; public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = 4; public static final String NAME = "throttling"; - public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; - + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, Setting.Scope.CLUSTER); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", (s) -> s.get(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES,Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, 
"cluster.routing.allocation.node_concurrent_incoming_recoveries"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), true, Setting.Scope.CLUSTER); + private volatile int primariesInitialRecoveries; - private volatile int concurrentRecoveries; + private volatile int concurrentIncomingRecoveries; + private volatile int concurrentOutgoingRecoveries; + @Inject public ThrottlingAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.primariesInitialRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.get(settings); - this.concurrentRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.get(settings); - logger.debug("using node_concurrent_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentRecoveries, primariesInitialRecoveries); + concurrentIncomingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.get(settings); + concurrentOutgoingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, this::setPrimariesInitialRecoveries); - clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, this::setConcurrentRecoveries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, this::setConcurrentIncomingRecoverries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, this::setConcurrentOutgoingRecoverries); + + logger.debug("using node_concurrent_outgoing_recoveries [{}], node_concurrent_incoming_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentOutgoingRecoveries, concurrentIncomingRecoveries, primariesInitialRecoveries); } - private void setConcurrentRecoveries(int concurrentRecoveries) { - this.concurrentRecoveries = concurrentRecoveries; + private void setConcurrentIncomingRecoverries(int concurrentIncomingRecoveries) { + this.concurrentIncomingRecoveries = concurrentIncomingRecoveries; + } + private void setConcurrentOutgoingRecoverries(int concurrentOutgoingRecoveries) { + this.concurrentOutgoingRecoveries = concurrentOutgoingRecoveries; } private void setPrimariesInitialRecoveries(int primariesInitialRecoveries) { @@ -99,7 +109,7 @@ public class ThrottlingAllocationDecider extends AllocationDecider { } } } - + // TODO should we allow shards not allocated post API to always allocate? 
// either primary or replica doing recovery (from peer shard) // count the number of recoveries on the node, its for both target (INITIALIZING) and source (RELOCATING) @@ -108,17 +118,16 @@ public class ThrottlingAllocationDecider extends AllocationDecider { @Override public Decision canAllocate(RoutingNode node, RoutingAllocation allocation) { - int currentRecoveries = 0; - for (ShardRouting shard : node) { - if (shard.initializing()) { - currentRecoveries++; - } - } - if (currentRecoveries >= concurrentRecoveries) { - return allocation.decision(Decision.THROTTLE, NAME, "too many shards currently recovering [%d], limit: [%d]", - currentRecoveries, concurrentRecoveries); - } else { - return allocation.decision(Decision.YES, NAME, "below shard recovery limit of [%d]", concurrentRecoveries); + int currentOutRecoveries = allocation.routingNodes().getOutgoingRecoveries(node.nodeId()); + int currentInRecoveries = allocation.routingNodes().getIncomingRecoveries(node.nodeId()); + if (currentOutRecoveries >= concurrentOutgoingRecoveries) { + return allocation.decision(Decision.THROTTLE, NAME, "too many outgoing shards currently recovering [%d], limit: [%d]", + currentOutRecoveries, concurrentOutgoingRecoveries); + } else if (currentInRecoveries >= concurrentIncomingRecoveries) { + return allocation.decision(Decision.THROTTLE, NAME, "too many incoming shards currently recovering [%d], limit: [%d]", + currentInRecoveries, concurrentIncomingRecoveries); + } else { + return allocation.decision(Decision.YES, NAME, "below shard recovery limit of outgoing: [%d] incoming: [%d]", concurrentOutgoingRecoveries, concurrentIncomingRecoveries); } } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 0e1dcf5a605..10c602688a4 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -109,8 +109,6 @@ public final class ClusterSettings extends AbstractScopedSettings { IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, MetaData.SETTING_READ_ONLY_SETTING, - RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, - RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, @@ -119,6 +117,8 @@ public final class ClusterSettings extends AbstractScopedSettings { RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, ThreadPool.THREADPOOL_GROUP_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 682b66e084e..c86309db136 100644 --- 
a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -29,19 +29,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.threadpool.ThreadPool; -import java.io.Closeable; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; +public class RecoverySettings extends AbstractComponent { -/** - */ -public class RecoverySettings extends AbstractComponent implements Closeable { - - public static final Setting INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_streams", 3, true, Setting.Scope.CLUSTER); - public static final Setting INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING = Setting.intSetting("indices.recovery.concurrent_small_file_streams", 2, true, Setting.Scope.CLUSTER); public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.CLUSTER); /** @@ -68,15 +58,8 @@ public class RecoverySettings extends AbstractComponent implements Closeable { */ public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); - public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes(); - public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); - private volatile int concurrentStreams; - private volatile int concurrentSmallFileStreams; - private final ThreadPoolExecutor concurrentStreamPool; - private final ThreadPoolExecutor concurrentSmallFileStreamPool; - private volatile ByteSizeValue maxBytesPerSec; private volatile SimpleRateLimiter rateLimiter; private volatile TimeValue retryDelayStateSync; @@ -101,14 +84,6 @@ public class RecoverySettings extends AbstractComponent implements Closeable { this.activityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings); - - this.concurrentStreams = INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.get(settings); - this.concurrentStreamPool = EsExecutors.newScaling("recovery_stream", 0, concurrentStreams, 60, TimeUnit.SECONDS, - EsExecutors.daemonThreadFactory(settings, "[recovery_stream]")); - this.concurrentSmallFileStreams = INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.get(settings); - this.concurrentSmallFileStreamPool = EsExecutors.newScaling("small_file_recovery_stream", 0, concurrentSmallFileStreams, 60, - TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[small_file_recovery_stream]")); - this.maxBytesPerSec = INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.get(settings); if (maxBytesPerSec.bytes() <= 0) { rateLimiter = null; @@ -116,11 +91,9 @@ public class RecoverySettings extends AbstractComponent implements Closeable { rateLimiter = new SimpleRateLimiter(maxBytesPerSec.mbFrac()); } - logger.debug("using max_bytes_per_sec[{}], concurrent_streams [{}]", - maxBytesPerSec, concurrentStreams); - 
clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING, this::setConcurrentStreams); - clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING, this::setConcurrentSmallFileStreams); + logger.debug("using max_bytes_per_sec[{}]", maxBytesPerSec); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, this::setRetryDelayStateSync); clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, this::setRetryDelayNetwork); @@ -129,20 +102,6 @@ public class RecoverySettings extends AbstractComponent implements Closeable { clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setActivityTimeout); } - @Override - public void close() { - ThreadPool.terminate(concurrentStreamPool, 1, TimeUnit.SECONDS); - ThreadPool.terminate(concurrentSmallFileStreamPool, 1, TimeUnit.SECONDS); - } - - public ThreadPoolExecutor concurrentStreamPool() { - return concurrentStreamPool; - } - - public ThreadPoolExecutor concurrentSmallFileStreamPool() { - return concurrentSmallFileStreamPool; - } - public RateLimiter rateLimiter() { return rateLimiter; } @@ -176,10 +135,6 @@ public class RecoverySettings extends AbstractComponent implements Closeable { this.chunkSize = chunkSize; } - private void setConcurrentStreams(int concurrentStreams) { - this.concurrentStreams = concurrentStreams; - concurrentStreamPool.setMaximumPoolSize(concurrentStreams); - } public void setRetryDelayStateSync(TimeValue retryDelayStateSync) { this.retryDelayStateSync = retryDelayStateSync; @@ -211,9 +166,4 @@ public class RecoverySettings extends AbstractComponent implements Closeable { rateLimiter = new SimpleRateLimiter(maxBytesPerSec.mbFrac()); } } - - private void setConcurrentSmallFileStreams(int concurrentSmallFileStreams) { - this.concurrentSmallFileStreams = concurrentSmallFileStreams; - concurrentSmallFileStreamPool.setMaximumPoolSize(concurrentSmallFileStreams); - } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 1410f499078..10ab9956df4 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -58,9 +58,6 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.Comparator; import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.stream.StreamSupport; @@ -591,100 +588,38 @@ public class RecoverySourceHandler { void sendFiles(Store store, StoreFileMetaData[] files, Function outputStreamFactory) throws Throwable { store.incRef(); try { - Future[] runners = asyncSendFiles(store, files, outputStreamFactory); - IOException corruptedEngine = null; - final List exceptions = new ArrayList<>(); - for (int i = 0; i < runners.length; i++) { - StoreFileMetaData md = files[i]; - try { - runners[i].get(); - } catch (ExecutionException t) { - corruptedEngine = handleExecutionException(store, corruptedEngine, exceptions, md, t.getCause()); - } catch (InterruptedException t) { - corruptedEngine = 
handleExecutionException(store, corruptedEngine, exceptions, md, t); + ArrayUtil.timSort(files, (a,b) -> Long.compare(a.length(), b.length())); // send smallest first + for (int i = 0; i < files.length; i++) { + final StoreFileMetaData md = files[i]; + try (final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READONCE)) { + // it's fine that we only have the indexInput in the try/with block. The copy method handles + // exceptions during close correctly and doesn't hide the original exception. + Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStreamFactory.apply(md)); + } catch (Throwable t) { + final IOException corruptIndexException; + if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(t)) != null) { + if (store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail! + logger.warn("{} Corrupted file detected {} checksum mismatch", shardId, md); + failEngine(corruptIndexException); + throw corruptIndexException; + } else { // corruption has happened on the way to replica + RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null); + exception.addSuppressed(t); + logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK", + corruptIndexException, shardId, request.targetNode(), md); + throw exception; + } + } else { + throw t; + } } } - if (corruptedEngine != null) { - failEngine(corruptedEngine); - throw corruptedEngine; - } else { - ExceptionsHelper.rethrowAndSuppress(exceptions); - } } finally { store.decRef(); } } - private IOException handleExecutionException(Store store, IOException corruptedEngine, List exceptions, StoreFileMetaData md, Throwable t) { - logger.debug("Failed to transfer file [" + md + "] on recovery"); - final IOException corruptIndexException; - final boolean checkIntegrity = corruptedEngine == null; - if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(t)) != null) { - if (checkIntegrity && store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail! - logger.warn("{} Corrupted file detected {} checksum mismatch", shardId, md); - corruptedEngine = corruptIndexException; - } else { // corruption has happened on the way to replica - RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null); - exception.addSuppressed(t); - if (checkIntegrity) { - logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK", - corruptIndexException, shardId, request.targetNode(), md); - } else { - logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum are skipped", - corruptIndexException, shardId, request.targetNode(), md); - } - exceptions.add(exception); - - } - } else { - exceptions.add(t); - } - return corruptedEngine; - } - protected void failEngine(IOException cause) { shard.failShard("recovery", cause); } - - Future[] asyncSendFiles(Store store, StoreFileMetaData[] files, Function outputStreamFactory) { - store.incRef(); - try { - final Future[] futures = new Future[files.length]; - for (int i = 0; i < files.length; i++) { - final StoreFileMetaData md = files[i]; - long fileSize = md.length(); - - // Files are split into two categories, files that are "small" - // (under 5mb) and other files. Small files are transferred - // using a separate thread pool dedicated to small files.
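The loop added above replaces the small/large thread-pool split described in the removed comment below: files are now sorted by length and copied one at a time, smallest first, so the tiny segments of a brand-new index still go out ahead of a multi-gigabyte segment of an old one. A minimal plain-JDK sketch of that sequential, smallest-first copy; SequentialFileSender and its types are illustrative stand-ins, not the RecoverySourceHandler API:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import java.util.function.Function;

// Sketch only: mirrors the ArrayUtil.timSort(...) + Streams.copy(...) loop above.
final class SequentialFileSender {
    static void sendAll(Path[] files, Function<Path, OutputStream> outputFor) throws IOException {
        Path[] sorted = files.clone();
        Arrays.sort(sorted, Comparator.comparingLong(SequentialFileSender::sizeOf)); // send smallest first
        for (Path file : sorted) {
            // copy each file completely before starting the next one
            try (InputStream in = Files.newInputStream(file);
                 OutputStream out = outputFor.apply(file)) {
                in.transferTo(out); // JDK 9+; a close() failure is suppressed rather than masking a copy error
            }
        }
    }

    private static long sizeOf(Path p) {
        try {
            return Files.size(p);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

Ascending length is the whole scheduling policy now; with no dedicated pools left there is nothing to configure, which is why the concurrent_streams settings disappear from RecoverySettings.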
- // - // The idea behind this is that while we are transferring an - // older, large index, a user may create a new index, but that - // index will not be able to recover until the large index - // finishes, by using two different thread pools we can allow - // tiny files (like segments for a brand new index) to be - // recovered while ongoing large segment recoveries are - // happening. It also allows these pools to be configured - // separately. - ThreadPoolExecutor pool; - if (fileSize > RecoverySettings.SMALL_FILE_CUTOFF_BYTES) { - pool = recoverySettings.concurrentStreamPool(); - } else { - pool = recoverySettings.concurrentSmallFileStreamPool(); - } - Future future = pool.submit(() -> { - try (final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READONCE)) { - // it's fine that we are only having the indexInput int he try/with block. The copy methods handles - // exceptions during close correctly and doesn't hide the original exception. - Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStreamFactory.apply(md)); - } - return null; - }); - futures[i] = future; - } - return futures; - } finally { - store.decRef(); - } - } } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index c964e79587e..a9651eace33 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -322,7 +322,6 @@ public class Node implements Releasable { for (Class plugin : pluginsService.nodeServices()) { injector.getInstance(plugin).stop(); } - injector.getInstance(RecoverySettings.class).close(); // we should stop this last since it waits for resources to get released // if we had scroll searchers etc or recovery going on we wait for to finish. 
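With the pools gone, RecoverySettings keeps one volatile field per value plus one clusterSettings.addSettingsUpdateConsumer(SETTING, this::setter) registration per dynamic setting, and Node no longer needs to close it. A compact sketch of that consumer pattern; the DynamicSettings registry below is hypothetical, not the real ClusterSettings API:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

// Hypothetical registry, not org.elasticsearch.common.settings.ClusterSettings:
// consumers are registered per key, and a batch is validated before anything
// is applied, so an update either takes effect completely or not at all.
final class DynamicSettings {
    private final Map<String, Consumer<String>> consumers = new HashMap<>();

    void addSettingsUpdateConsumer(String key, Consumer<String> consumer) {
        consumers.put(key, consumer);
    }

    void applySettings(Map<String, String> update) {
        for (String key : update.keySet()) { // validate the whole batch first
            if (consumers.containsKey(key) == false) {
                throw new IllegalArgumentException("unknown setting [" + key + "]");
            }
        }
        update.forEach((key, value) -> consumers.get(key).accept(value)); // then apply
    }
}

final class RecoveryLimits {
    private volatile String maxBytesPerSec = "40mb"; // illustrative default

    RecoveryLimits(DynamicSettings settings) {
        // same shape as the registrations in the RecoverySettings hunk above
        settings.addSettingsUpdateConsumer("indices.recovery.max_bytes_per_sec", this::setMaxBytesPerSec);
    }

    String maxBytesPerSec() { return maxBytesPerSec; }

    private void setMaxBytesPerSec(String value) { this.maxBytesPerSec = value; }
}

Because the fields are volatile, readers on the recovery path simply see whichever value was applied last; no locking is involved.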
injector.getInstance(IndicesService.class).stop(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index b0b5e9fd517..7011b4092e4 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.admin.indices.upgrade.UpgradeIT; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; @@ -117,7 +118,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { public Settings nodeSettings(int ord) { return Settings.builder() .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), 30) // increase recovery speed for small files + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) // speed up recoveries + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30) .build(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index c5e48a97dfd..03cfbf2b307 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -50,7 +50,8 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { .put(super.nodeSettings(nodeOrdinal)) //make sure that enough concurrent reroutes can happen at the same time //we have a minimum of 2 nodes, and a maximum of 10 shards, thus 5 should be enough - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 5) .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 10) .build(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 4298b27fa3d..6b406a3bfdf 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -162,7 +162,8 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testDelayWithALargeAmountOfShards() throws Exception { Settings commonSettings = settingsBuilder() - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 1) + 
.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 1) .build(); logger.info("--> starting 4 nodes"); String node_1 = internalCluster().startNode(commonSettings); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java index b451183826b..72ecc171eed 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java @@ -32,7 +32,7 @@ public final class RandomShardRoutingMutator { } public static void randomChange(ShardRouting shardRouting, String[] nodes) { - switch (randomInt(3)) { + switch (randomInt(2)) { case 0: if (shardRouting.unassigned() == false) { shardRouting.moveToUnassigned(new UnassignedInfo(randomReason(), randomAsciiOfLength(10))); @@ -46,13 +46,6 @@ public final class RandomShardRoutingMutator { } break; case 2: - if (shardRouting.primary()) { - shardRouting.moveFromPrimary(); - } else { - shardRouting.moveToPrimary(); - } - break; - case 3: if (shardRouting.initializing()) { shardRouting.moveToStarted(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index d69264a1e3a..fcdef700b98 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -50,7 +50,7 @@ public class RoutingTableTests extends ESAllocationTestCase { private int totalNumberOfShards; private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private final AllocationService ALLOCATION_SERVICE = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .build()); private ClusterState clusterState; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 1cf5ba0083d..4c4fa72a6ec 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -54,7 +54,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(AllocationCommandsTests.class); public void testMoveShardCommand() { - AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("creating an index with 1 shard, no replica"); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java index 8d510e7f0c5..52aad66776e 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java @@ -38,9 +38,10 @@ public class AllocationPriorityTests extends ESAllocationTestCase { */ public void testPrioritizedIndicesAllocatedFirst() { AllocationService allocation = createAllocationService(settingsBuilder(). - put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 1) + put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 1).build()); + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 1).build()); final String highPriorityName; final String lowPriorityName; final int priorityFirst; @@ -84,7 +85,7 @@ public class AllocationPriorityTests extends ESAllocationTestCase { routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); - assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); + assertEquals(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).toString(),2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index e9d0f75b1c1..eb94b6de109 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -54,7 +54,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded1() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -386,7 +386,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded5() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -464,7 +464,7 @@ public class AwarenessAllocationTests extends 
ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded6() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -544,7 +544,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness1() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") @@ -611,7 +611,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness2() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") @@ -827,7 +827,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testUnassignedShardsWithUnbalancedZones() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "zone") .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 627febdbc11..08cbdc09fe0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; @@ -358,7 +359,9 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public boolean allocateUnassigned(RoutingAllocation allocation) { RoutingNodes.UnassignedShards unassigned = allocation.routingNodes().unassigned(); boolean changed = !unassigned.isEmpty(); - for (ShardRouting sr : unassigned.drain()) { + ShardRouting[] drain = unassigned.drain(); + ArrayUtil.timSort(drain, (a, b) -> { return a.primary() ? 
-1 : 1; }); // we have to allocate primaries first + for (ShardRouting sr : drain) { switch (sr.id()) { case 0: if (sr.primary()) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java index 34d78ae3099..886462610ca 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java @@ -43,7 +43,7 @@ public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { public void testClusterConcurrentRebalance() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.cluster_concurrent_rebalance", 3) .build()); @@ -145,4 +145,4 @@ public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(10)); assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(0)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index d807dc1b5ca..cb09fb93b60 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -45,7 +45,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testSimpleDeadNodeOnStartedPrimaryShard() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -96,7 +96,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnToNode() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -170,7 +170,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnFromNode() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java index e7c956c4ccd..fc686f0bb5a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java @@ -43,7 +43,7 @@ public class ElectReplicaAsPrimaryDuringRelocationTests extends ESAllocationTest private final ESLogger logger = Loggers.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class); public void testElectReplicaAsPrimaryDuringRelocation() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 8dffacaa379..b8ab9c13590 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -56,7 +56,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailedShardPrimaryRelocatingToAndFrom() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -144,7 +144,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailPrimaryStartedCheckReplicaElected() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -225,7 +225,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureSingleNode() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -281,7 +281,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testSingleShardMultipleAllocationFailures() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -337,7 +337,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureTwoNodes() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -397,7 +397,7 @@ public class FailedShardsRoutingTests extends 
ESAllocationTestCase { public void testRebalanceFailure() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index a8d015a0d49..2b0c7ef6bda 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -57,7 +57,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testDoNotAllocateFromPrimary() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -171,7 +171,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRandom() { AllocationService service = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -220,7 +220,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRollingRestart() { AllocationService service = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java index 0ac98d4f92b..d4beb7190e3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESAllocationTestCase; @@ -42,6 +43,7 @@ public class PreferPrimaryAllocationTests extends ESAllocationTestCase { logger.info("create an allocation with 1 initial recoveries"); AllocationService strategy = 
createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 1) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java index e994c885629..7e59ab8a6b4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java @@ -43,7 +43,7 @@ public class PrimaryElectionRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(PrimaryElectionRoutingTests.class); public void testBackupElectionToPrimaryWhenPrimaryCanBeAllocatedToAnotherNode() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -93,7 +93,7 @@ public class PrimaryElectionRoutingTests extends ESAllocationTestCase { } public void testRemovingInitializingReplicasIfPrimariesFails() { - AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java index 12ff9fd3f7d..371624484ff 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java @@ -44,6 +44,7 @@ public class PrimaryNotRelocatedWhileBeingRecoveredTests extends ESAllocationTes public void testPrimaryNotRelocatedWhileBeingRecoveredFrom() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put("cluster.routing.allocation.concurrent_source_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index 4d5f4d07ea1..abc561a0916 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import 
org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESAllocationTestCase; @@ -57,7 +58,7 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { public void testRandomDecisions() { RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(getRandom()); AllocationService strategy = new AllocationService(settingsBuilder().build(), new AllocationDeciders(Settings.EMPTY, - new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), + new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY), randomAllocationDecider))), new ShardsAllocators(NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); int indices = scaledRandomIntBetween(1, 20); Builder metaBuilder = MetaData.builder(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 18725a0de78..4672f339c70 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -56,7 +56,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { } AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java index 0d33b5ecd46..1b8bea26dbe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java @@ -45,7 +45,7 @@ public class ReplicaAllocatedAfterPrimaryTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ReplicaAllocatedAfterPrimaryTests.class); public void testBackupIsAllocatedAfterPrimary() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java index eec1b48be97..9a4e56a26b2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java @@ -28,6 
+28,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESAllocationTestCase; @@ -211,6 +212,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 1) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 3) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index c0f0c0c2252..dd3f3f373ff 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -46,7 +46,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ShardsLimitAllocationTests.class); public void testIndexLevelShardsLimitAllocate() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -89,7 +89,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { public void testClusterLevelShardsLimitAllocate() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1) .build()); @@ -125,7 +125,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { // Bump the cluster total shards to 2 strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2) .build()); @@ -147,7 +147,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { public void testIndexLevelShardsLimitRemain() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.balance.index", 0.0f) diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index 29ef451324d..bf41ad8a053 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -59,7 +59,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(SingleShardNoReplicasRoutingTests.class); public void testSingleIndexStartedShard() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -160,7 +160,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { } public void testSingleIndexShardFailed() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -210,7 +210,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexEvenDistribution() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -322,7 +322,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexUnevenNodes() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -413,4 +413,4 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { assertThat(routingNode.numberOfShardsWithState(STARTED), equalTo(2)); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java index ff442852017..f7033ec2596 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java @@ -44,7 +44,7 @@ public class SingleShardOneReplicaRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { - AllocationService strategy = 
createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java index 28033915abe..0712e9cd02a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java @@ -51,9 +51,9 @@ public class StartedShardsRoutingTests extends ESAllocationTestCase { .nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) .metaData(MetaData.builder().put(indexMetaData, false)); - final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1); - final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", randomBoolean(), ShardRoutingState.STARTED, 1); - final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", randomBoolean(), ShardRoutingState.RELOCATING, 1); + final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", true, ShardRoutingState.INITIALIZING, 1); + final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", true, ShardRoutingState.STARTED, 1); + final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1); stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder("test") .addIndexShard(new IndexShardRoutingTable.Builder(initShard.shardId()).addShard(initShard).build()) .addIndexShard(new IndexShardRoutingTable.Builder(startedShard.shardId()).addShard(startedShard).build()) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 223da88192b..1d60436d3c7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -25,11 +25,16 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; +import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESAllocationTestCase; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static 
org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -103,7 +108,8 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { public void testReplicaAndPrimaryRecoveryThrottling() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 3) + .put("cluster.routing.allocation.node_concurrent_recoveries", 3) + .put("cluster.routing.allocation.concurrent_source_recoveries", 3) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 3) .build()); @@ -169,4 +175,157 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(0)); assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); } + + public void testThrottleIncomingAndOutgoing() { + Settings settings = settingsBuilder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 5) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 5) + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 5) + .build(); + AllocationService strategy = createAllocationService(settings); + logger.info("Building initial routing table"); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(9).numberOfReplicas(0)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + logger.info("start one node, do reroute, only 5 should initialize"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(4)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 5); + + logger.info("start initializing, all primaries should be started"); + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(4)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + logger.info("start another 2 nodes, 5 shards should be relocating - at most 5 are allowed per node"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node2")).put(newNode("node3"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(4)); + 
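// node1 holds all nine started primaries and at most five outgoing
+ // recoveries are allowed per node, so only five shards can relocate away
+ // at once: four stay started while five initialize on node2 and node3.
+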
assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 3); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 2); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 5); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + logger.info("start the relocating shards, one more shard should relocate away from node1"); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(8)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + } + + public void testOutgoingThrottlesAllocation() { + Settings settings = settingsBuilder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 1) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 1) + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 1) + .build(); + AllocationService strategy = createAllocationService(settings); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(3).numberOfReplicas(0)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + +
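// starting the initializing shards completes those recoveries, so every
+ // incoming and outgoing counter drops back to zero before the next reroute:
+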
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node1").get(0).shardId(), "node1", "node2"))); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES); + routingTable = reroute.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + // outgoing throttles + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node1")), true); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + // incoming throttles + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node2")), true); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); + + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + 
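// node2 is already receiving the shard moved from node1 and the incoming
+ // cap is one, so a second move onto node2 throttles as well and the routing
+ // table stays unchanged:
+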
assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java index 7fa27e7050c..5ff5af4e4cd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java @@ -46,7 +46,7 @@ public class UpdateNumberOfReplicasTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class); public void testUpdateNumberOfReplicas() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index e319d4156ac..fa52503eac5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -107,7 +107,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -192,7 +192,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -223,7 +223,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -303,7 +303,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new 
AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -360,7 +360,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -427,7 +427,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -458,7 +458,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -567,7 +567,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -635,7 +635,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -738,7 +738,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ 
-900,7 +900,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider ))); AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -1000,8 +1000,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ))); AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") + .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); RoutingAllocation.Result result = strategy.reroute(clusterState, "reroute"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index f8be6a8c4da..b2559c29ed2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -159,6 +159,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { Settings build = settingsBuilder() .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? 
Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .build(); ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 6094d49234c..ced1e0097a1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -224,7 +224,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testAttributePreferenceRouting() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id,zone") .build()); @@ -279,7 +279,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testNodeSelectorRouting(){ AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); @@ -336,7 +336,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testShardsAndPreferNodeRouting() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .build()); MetaData metaData = MetaData.builder() @@ -397,7 +397,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testReplicaShardPreferenceIters() throws Exception { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .build()); OperationRouting operationRouting = new OperationRouting(Settings.Builder.EMPTY_SETTINGS, new AwarenessAllocationDecider()); @@ -479,4 +479,4 @@ public class RoutingIteratorTests extends ESAllocationTestCase { assertTrue(routing.primary()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 2c6a55da242..0de220a8fa3 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -56,7 +56,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { ClusterChangedEvent generateEvent(boolean initializing, boolean versionChanged, boolean masterEligible) { //ridiculous settings to make sure we don't run into 
uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 100) + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) @@ -110,7 +110,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { ClusterChangedEvent generateCloseEvent(boolean masterEligible) { //ridiculous settings to make sure we don't run into uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 100) + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 1dfab4f62d3..f0650a1cbda 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -319,14 +319,13 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { assertThat(state.metaData().index("test").getAliases().get("test_alias").filter(), notNullValue()); } - @TestLogging("gateway:TRACE,indices.recovery:TRACE,index.engine:TRACE") public void testReusePeerRecovery() throws Exception { final Settings settings = settingsBuilder() .put("action.admin.cluster.node.shutdown.delay", "10ms") .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) .put("gateway.recover_after_nodes", 4) - - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 4) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 4) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 4) .put(MockFSDirectoryService.CRASH_INDEX, false).build(); internalCluster().startNodesAsync(4, settings).get(); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 496bdc21b15..1dfe8514502 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -114,9 +114,8 @@ public class CorruptedFileIT extends ESIntegTestCase { // and we need to make sure primaries are not just trashed if we don't have replicas .put(super.nodeSettings(nodeOrdinal)) // speed up recoveries - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), 10) - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), 10) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 5) 
.build(); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 707fbe0e02e..a64b8606aea 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -108,7 +108,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { assertEquals(0, recoveryDiff.missing.size()); IndexReader reader = DirectoryReader.open(targetStore.directory()); assertEquals(numDocs, reader.maxDoc()); - IOUtils.close(reader, writer, store, targetStore, recoverySettings); + IOUtils.close(reader, writer, store, targetStore); } public void testHandleCorruptedIndexOnSendSendFiles() throws Throwable { @@ -170,7 +170,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { assertNotNull(ExceptionsHelper.unwrapCorruption(ex)); } assertTrue(failedEngine.get()); - IOUtils.close(store, targetStore, recoverySettings); + IOUtils.close(store, targetStore); } @@ -231,7 +231,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { fail("not expected here"); } assertFalse(failedEngine.get()); - IOUtils.close(store, targetStore, recoverySettings); + IOUtils.close(store, targetStore); } private Store newStore(Path path) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java index d7e13be312f..26c22fc3bb0 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java @@ -32,18 +32,6 @@ public class RecoverySettingsTests extends ESSingleNodeTestCase { } public void testAllSettingsAreDynamicallyUpdatable() { - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), randomIntBetween(1, 200), new Validator() { - @Override - public void validate(RecoverySettings recoverySettings, int expectedValue) { - assertEquals(expectedValue, recoverySettings.concurrentStreamPool().getMaximumPoolSize()); - } - }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), randomIntBetween(1, 200), new Validator() { - @Override - public void validate(RecoverySettings recoverySettings, int expectedValue) { - assertEquals(expectedValue, recoverySettings.concurrentSmallFileStreamPool().getMaximumPoolSize()); - } - }); innerTestSettings(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), 0, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index d53740c198b..234fd693498 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -200,6 +200,11 @@ If you are using any of these settings please take the time and review their purpose. These are _expert settings_ and should only be used if absolutely necessary. If you have set any of the above settings as persistent cluster settings please use the settings update API and set their superseded keys accordingly. +The following settings have been removed without replacement: + + * `indices.recovery.concurrent_small_file_streams` - recoveries are now single-threaded.
The number of concurrent outgoing recoveries is throttled via allocation deciders + * `indices.recovery.concurrent_file_streams` - recoveries are now single-threaded. The number of concurrent outgoing recoveries is throttled via allocation deciders + ==== Translog settings The `index.translog.flush_threshold_ops` setting is not supported anymore. In order to control flushes based on the transaction log @@ -211,6 +216,14 @@ anymore, the `buffered` implementation is now the only available option and uses The deprecated settings `index.cache.query.enable` and `indices.cache.query.size` have been removed and are replaced with `index.requests.cache.enable` and `indices.requests.cache.size` respectively. +==== Allocation settings + +Allocation settings deprecated in 1.x have been removed: + + * `cluster.routing.allocation.concurrent_recoveries` is superseded by `cluster.routing.allocation.node_concurrent_recoveries` + +Please change the setting in your configuration files or in the cluster state to use the new setting instead. + [[breaking_30_mapping_changes]] === Mapping changes diff --git a/docs/reference/modules/cluster/shards_allocation.asciidoc b/docs/reference/modules/cluster/shards_allocation.asciidoc index b8073927a0f..b650e237629 100644 --- a/docs/reference/modules/cluster/shards_allocation.asciidoc +++ b/docs/reference/modules/cluster/shards_allocation.asciidoc @@ -27,10 +27,15 @@ one of the active allocation ids in the cluster state. -- -`cluster.routing.allocation.node_concurrent_recoveries`:: +`cluster.routing.allocation.node_concurrent_incoming_recoveries`:: - How many concurrent shard recoveries are allowed to happen on a node. - Defaults to `2`. + How many concurrent incoming shard recoveries are allowed to happen on a node. Incoming recoveries are the recoveries + where the target shard (most likely the replica unless a shard is relocating) is allocated on the node. Defaults to `2`. + +`cluster.routing.allocation.node_concurrent_outgoing_recoveries`:: + + How many concurrent outgoing shard recoveries are allowed to happen on a node. Outgoing recoveries are the recoveries + where the source shard (most likely the primary unless a shard is relocating) is allocated on the node. Defaults to `2`. `cluster.routing.allocation.node_initial_primaries_recoveries`:: @@ -47,17 +52,6 @@ one of the active allocation ids in the cluster state. Defaults to `false`, meaning that no check is performed by default. This setting only applies if multiple nodes are started on the same machine. -`indices.recovery.concurrent_streams`:: - - The number of network streams to open per node to recover a shard from - a peer shard. Defaults to `3`. - -`indices.recovery.concurrent_small_file_streams`:: - - The number of streams to open per node for small files (under 5mb) to - recover a shard from a peer shard. Defaults to `2`. - - [float] === Shard Rebalancing Settings diff --git a/docs/reference/modules/indices/recovery.asciidoc b/docs/reference/modules/indices/recovery.asciidoc index cd21f135e38..8de3309347c 100644 --- a/docs/reference/modules/indices/recovery.asciidoc +++ b/docs/reference/modules/indices/recovery.asciidoc @@ -3,12 +3,6 @@ The following _expert_ settings can be set to manage the recovery policy. -`indices.recovery.concurrent_streams`:: - Defaults to `3`. - -`indices.recovery.concurrent_small_file_streams`:: - Defaults to `2`. - `indices.recovery.file_chunk_size`:: Defaults to `512kb`.
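Both new throttling settings documented above are dynamic cluster settings, so they can be adjusted at runtime without a node restart. A minimal sketch through the 2.x-era Java client follows; the `client` instance, the helper name and the value `4` are illustrative assumptions, not part of this patch:

```
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;

public class RecoveryThrottlingExample {
    // Hypothetical helper: temporarily allow more concurrent recoveries per node.
    // The two keys are the settings documented in the asciidoc change above.
    public static void relaxRecoveryThrottling(Client client) {
        Settings transientSettings = Settings.settingsBuilder()
                .put("cluster.routing.allocation.node_concurrent_incoming_recoveries", 4)
                .put("cluster.routing.allocation.node_concurrent_outgoing_recoveries", 4)
                .build();
        client.admin().cluster().prepareUpdateSettings()
                .setTransientSettings(transientSettings)
                .get();
    }
}
```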
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 5ab862e3e4d..ea2796aad84 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -306,13 +306,11 @@ public final class InternalTestCluster extends TestCluster { builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b"); builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b"); if (TEST_NIGHTLY) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 10, 15)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 10, 15)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); } else if (random.nextInt(100) <= 90) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 3, 6)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS_SETTING.getKey(), RandomInts.randomIntBetween(random, 3, 6)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); } // always reduce this - it can make tests really slow builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50))); From 6f3fc5d75d967f92ae908a1eeb028e0619741e2e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 22 Dec 2015 15:09:44 +0100 Subject: [PATCH 226/322] Speed up CodecTests. Some tests, but in particular CodecTests, are slow because they test all versions that ever existed even though they should only test supported versions. 
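The guard added below is deliberately minimal: skip any version before `Version.V_2_0_0` at the top of each loop. The same filtering could be factored into a helper, sketched here as a hypothetical addition that is not part of the patch; `Version` and `VersionUtils` are the test-framework classes used in the hunks below:

```
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.Version;
import org.elasticsearch.test.VersionUtils;

public class SupportedVersionsExample {
    // Hypothetical helper: keep only versions whose indices this codebase can
    // still read, so tests do not pay for every version that ever existed.
    public static List<Version> supportedVersions() {
        List<Version> supported = new ArrayList<>();
        for (Version v : VersionUtils.allVersions()) {
            if (v.onOrAfter(Version.V_2_0_0)) {
                supported.add(v);
            }
        }
        return supported;
    }
}
```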
--- .../routing/RoutingBackwardCompatibilityTests.java | 4 ++++ .../test/java/org/elasticsearch/codecs/CodecTests.java | 8 ++++++++ 2 files changed, 12 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java index e8be4e34ae0..5ff4a328ef6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java @@ -53,6 +53,10 @@ public class RoutingBackwardCompatibilityTests extends ESTestCase { OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, null); for (Version version : VersionUtils.allVersions()) { + if (version.onOrAfter(Version.V_2_0_0) == false) { + // unsupported version, no need to test + continue; + } final Settings settings = settings(version).build(); IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(settings).numberOfShards(numberOfShards).numberOfReplicas(randomInt(3)).build(); MetaData.Builder metaData = MetaData.builder().put(indexMetaData, false); diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index cffdcdc86eb..e5d27b872fb 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -44,6 +44,10 @@ public class CodecTests extends ESSingleNodeTestCase { .endObject().endObject().string(); int i = 0; for (Version v : VersionUtils.allVersions()) { + if (v.onOrAfter(Version.V_2_0_0) == false) { + // no need to test, we don't support upgrading from these versions + continue; + } IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { @@ -67,6 +71,10 @@ public class CodecTests extends ESSingleNodeTestCase { .endObject().endObject().string(); int i = 0; for (Version v : VersionUtils.allVersions()) { + if (v.onOrAfter(Version.V_2_0_0) == false) { + // no need to test, we don't support upgrading from these versions + continue; + } IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { From 6d0114edb58bd57290d327370b4b4b8a4778edd7 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 22 Dec 2015 12:40:28 +0100 Subject: [PATCH 227/322] Fix Java API documentation for indexed scripts --- docs/java-api/indexed-scripts.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/java-api/indexed-scripts.asciidoc b/docs/java-api/indexed-scripts.asciidoc index 7bfb9f3273f..45d19ae662d 100644 --- a/docs/java-api/indexed-scripts.asciidoc +++ b/docs/java-api/indexed-scripts.asciidoc @@ -10,7 +10,7 @@ and delete indexed scripts and templates. 
PutIndexedScriptResponse response = client.preparePutIndexedScript() .setScriptLang("groovy") .setId("script1") - .setSource("_score * doc['my_numeric_field'].value") + .setSource("script", "_score * doc['my_numeric_field'].value") .execute() .actionGet(); From f9a601c7da8ae050754dbd24f710c0102b7aafa7 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 22 Dec 2015 17:31:06 +0100 Subject: [PATCH 228/322] Make BulkProcessorRetryIT less sensitive In this commit we increase the queue size of the bulk pool in BulkProcessorRetryIT to make it less sensitive. This test case should stress the pool enough that the bulk processor needs to back off, but not so much that the backoff policy gives up at some point (which is a valid condition), so we still keep the queue size below the default of 50. --- .../org/elasticsearch/action/bulk/BulkProcessorRetryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 3c38e2ef0fa..3fd32361215 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -54,7 +54,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase { //.put("threadpool.listener.queue_size", 1) .put("threadpool.get.queue_size", 1) // default is 50 - .put("threadpool.bulk.queue_size", 20) + .put("threadpool.bulk.queue_size", 30) .build(); } From 7abd05173496efaad1281badc14c13295b88a62b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 22 Dec 2015 12:07:37 -0500 Subject: [PATCH 229/322] Better containment of Hadoop for actual blobstore operations --- .../repositories/hdfs/HdfsBlobStore.java | 11 +++++++++-- .../elasticsearch/repositories/hdfs/HdfsPlugin.java | 2 ++ .../src/main/plugin-metadata/plugin-security.policy | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 0351ae28219..23404a7c360 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -29,10 +29,13 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import java.io.IOException; +import java.lang.reflect.ReflectPermission; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import javax.security.auth.AuthPermission; + final class HdfsBlobStore implements BlobStore { private final Path root; @@ -110,10 +113,13 @@ final class HdfsBlobStore implements BlobStore { interface Operation<V> { V run(FileContext fileContext) throws IOException; } - + /** * Executes the provided operation against this store */ + // we can do FS ops with only two elevated permissions: + // 1) hadoop dynamic proxy is messy with access rules + // 2) allow hadoop to add credentials to our Subject <V> V execute(Operation<V> operation) throws IOException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -129,7 +135,8 @@ final class HdfsBlobStore implements BlobStore { public V run() throws IOException { return operation.run(fileContext); } - }); + }, null, new
ReflectPermission("suppressAccessChecks"), + new AuthPermission("modifyPrivateCredentials")); } catch (PrivilegedActionException pae) { throw (IOException) pae.getException(); } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java index e85f0c0c865..ccd0b405ff2 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -63,6 +63,8 @@ public final class HdfsPlugin extends Plugin { Class.forName("org.apache.hadoop.util.StringUtils"); Class.forName("org.apache.hadoop.util.ShutdownHookManager"); Class.forName("org.apache.hadoop.conf.Configuration"); + Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants"); + Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck"); } catch (ClassNotFoundException | IOException e) { throw new RuntimeException(e); } finally { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index 83e5599a9c8..85447245c96 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -18,7 +18,7 @@ */ grant { - // Hadoop UserGroupInformation clinit + // Hadoop UserGroupInformation, HdfsConstants, PipelineAck clinit permission java.lang.RuntimePermission "getClassLoader"; // UserGroupInformation (UGI) Metrics clinit From 482843e27b6b498e1c045cbae5ad250102e76bac Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 21 Dec 2015 11:06:15 -0700 Subject: [PATCH 230/322] Fix build to run correctly on FreeBSD This adds the required changes/checks so that the build can run on FreeBSD. There are a few things that differ between FreeBSD and Linux: - CPU probes return -1 for CPU usage - `hot_threads` cannot be supported on FreeBSD From OpenJDK's `os_bsd.cpp`: ```c++ bool os::is_thread_cpu_time_supported() { #ifdef __APPLE__ return true; #else return false; #endif } ``` So this API now returns (for each FreeBSD node): ``` curl -s localhost:9200/_nodes/hot_threads ::: {Devil Hunter Gabriel}{q8OJnKCcQS6EB9fygU4R4g}{127.0.0.1}{127.0.0.1:9300} hot_threads is not supported on FreeBSD ``` - multicast fails in native `join` method - known bug: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193246 Which causes: ``` 1> Caused by: java.net.SocketException: Invalid argument 1> at java.net.PlainDatagramSocketImpl.join(Native Method) 1> at java.net.AbstractPlainDatagramSocketImpl.join(AbstractPlainDatagramSocketImpl.java:179) 1> at java.net.MulticastSocket.joinGroup(MulticastSocket.java:323) 1> at org.elasticsearch.plugin.discovery.multicast.MulticastChannel$Plain.buildMulticastSocket(MulticastChannel.java:309) ``` So these tests are skipped on FreeBSD. 
Resolves #15562 --- .../elasticsearch/monitor/jvm/HotThreads.java | 6 +++++ .../action/admin/HotThreadsIT.java | 22 ++++++++++++++----- .../multicast/MulticastZenPingTests.java | 11 ++++++++++ .../test/cat.nodes/10_basic.yaml | 8 +++---- 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index bf9a04ed734..6efbc54403b 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -20,6 +20,7 @@ package org.elasticsearch.monitor.jvm; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.unit.TimeValue; @@ -131,6 +132,11 @@ public class HotThreads { private String innerDetect() throws Exception { StringBuilder sb = new StringBuilder(); + if (Constants.FREE_BSD) { + sb.append("hot_threads is not supported on FreeBSD"); + return sb.toString(); + } + sb.append("Hot threads at "); sb.append(DATE_TIME_FORMATTER.printer().print(System.currentTimeMillis())); sb.append(", interval="); diff --git a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java index a6217d7ea64..6c11bc35dec 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.admin; +import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; @@ -40,6 +41,7 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.lessThan; public class HotThreadsIT extends ESIntegTestCase { + public void testHotThreadsDontFail() throws ExecutionException, InterruptedException { /** * This test just checks if nothing crashes or gets stuck etc. 
@@ -125,6 +127,7 @@ public class HotThreadsIT extends ESIntegTestCase { } public void testIgnoreIdleThreads() throws ExecutionException, InterruptedException { + assumeTrue("no support for hot_threads on FreeBSD", Constants.FREE_BSD == false); // First time, don't ignore idle threads: NodesHotThreadsRequestBuilder builder = client().admin().cluster().prepareNodesHotThreads(); @@ -158,12 +161,19 @@ public class HotThreadsIT extends ESIntegTestCase { NodesHotThreadsResponse response = client().admin().cluster().prepareNodesHotThreads().execute().get(); - for (NodeHotThreads node : response.getNodesMap().values()) { - String result = node.getHotThreads(); - assertTrue(result.indexOf("Hot threads at") != -1); - assertTrue(result.indexOf("interval=500ms") != -1); - assertTrue(result.indexOf("busiestThreads=3") != -1); - assertTrue(result.indexOf("ignoreIdleThreads=true") != -1); + if (Constants.FREE_BSD) { + for (NodeHotThreads node : response.getNodesMap().values()) { + String result = node.getHotThreads(); + assertTrue(result.indexOf("hot_threads is not supported") != -1); + } + } else { + for (NodeHotThreads node : response.getNodesMap().values()) { + String result = node.getHotThreads(); + assertTrue(result.indexOf("Hot threads at") != -1); + assertTrue(result.indexOf("interval=500ms") != -1); + assertTrue(result.indexOf("busiestThreads=3") != -1); + assertTrue(result.indexOf("ignoreIdleThreads=true") != -1); + } } } } diff --git a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java index ba673127f4f..8c2d95ec799 100644 --- a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java +++ b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.discovery.multicast; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -45,6 +46,7 @@ import java.net.InetAddress; import java.net.MulticastSocket; public class MulticastZenPingTests extends ESTestCase { + private Settings buildRandomMulticast(Settings settings) { Settings.Builder builder = Settings.builder().put(settings); builder.put("discovery.zen.ping.multicast.group", "224.2.3." + randomIntBetween(0, 255)); @@ -57,6 +59,7 @@ public class MulticastZenPingTests extends ESTestCase { } public void testSimplePings() throws InterruptedException { + assumeTrue("https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193246", Constants.FREE_BSD == false); Settings settings = Settings.EMPTY; settings = buildRandomMulticast(settings); Thread.sleep(30000); @@ -129,8 +132,16 @@ public class MulticastZenPingTests extends ESTestCase { } } + // This test is here because when running on FreeBSD, if no tests are + // executed for the 'multicast' project it will assume everything + // failed, so we need to have at least one test that runs. + public void testAlwaysRun() throws Exception { + assertTrue(true); + } + @SuppressForbidden(reason = "I bind to wildcard addresses. 
I am a total nightmare") public void testExternalPing() throws Exception { + assumeTrue("https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193246", Constants.FREE_BSD == false); Settings settings = Settings.EMPTY; settings = buildRandomMulticast(settings); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml index 2531e6ef025..f41e14919f8 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml @@ -6,8 +6,8 @@ - match: $body: | - / #host ip heap.percent ram.percent cpu load node.role master name - ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ + / #host ip heap.percent ram.percent cpu load node.role master name + ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: @@ -15,8 +15,8 @@ - match: $body: | - /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n - (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ + /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n + (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: From 4c9a9b4b2d82e2c724baef2fe73220b4e050e932 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 22 Dec 2015 16:54:44 -0500 Subject: [PATCH 231/322] Reduce number of threads in o.e.c.c.CacheTests This commit reduces the randomized number of threads in the cache tests to reduce the amount of time that the tests take to execute. 
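These cache tests share a latch-based harness in which every thread is started, lined up on a gate and then released at once, so wall-clock time grows roughly linearly with the thread count, and lowering the randomized upper bound is a direct runtime win. A condensed, illustrative sketch of that pattern (not the exact test code):

```
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;

public class ConcurrencyHarnessSketch {
    // Runtime grows with numberOfThreads, which is why the randomized upper
    // bound drops from around 200 to 32 in the diff below.
    public static void runConcurrently(int numberOfThreads, Runnable task) throws InterruptedException {
        CountDownLatch startGate = new CountDownLatch(1 + numberOfThreads);
        List<Thread> threads = new ArrayList<>();
        for (int i = 0; i < numberOfThreads; i++) {
            Thread t = new Thread(() -> {
                startGate.countDown();
                try {
                    startGate.await(); // line up so all threads hit the cache at once
                } catch (InterruptedException e) {
                    throw new AssertionError(e);
                }
                task.run();
            });
            threads.add(t);
            t.start();
        }
        startGate.countDown(); // last count releases every thread together
        for (Thread t : threads) {
            t.join();
        }
    }
}
```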
--- .../java/org/elasticsearch/common/cache/CacheTests.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java index 369aa8bcafd..d0b7d998dd5 100644 --- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java +++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java @@ -492,7 +492,7 @@ public class CacheTests extends ESTestCase { } public void testComputeIfAbsentCallsOnce() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); List threads = new ArrayList<>(); AtomicReferenceArray flags = new AtomicReferenceArray(numberOfEntries); @@ -558,7 +558,7 @@ public class CacheTests extends ESTestCase { } } - int numberOfThreads = randomIntBetween(2, 256); + int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); CountDownLatch deadlockLatch = new CountDownLatch(numberOfThreads); @@ -626,7 +626,7 @@ public class CacheTests extends ESTestCase { } public void testCachePollution() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); List threads = new ArrayList<>(); @@ -676,7 +676,7 @@ public class CacheTests extends ESTestCase { // test that the cache is not corrupted under lots of concurrent modifications, even hitting the same key // here be dragons: this test did catch one subtle bug during development; do not remove lightly public void testTorture() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder() .setMaximumWeight(1000) From f535c2702493c7d4415a1fce7bdae13cedad1186 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 16 Dec 2015 11:28:32 +0100 Subject: [PATCH 232/322] Make mapping updates more robust. This changes a couple of things: Mappings are truly immutable. Before, each field mapper stored a MappedFieldTypeReference that was shared across fields that have the same name across types. This means that a mapping update could have the side-effect of changing the field type in other types when updateAllTypes is true. This works differently now: after a mapping update, a new copy of the mappings is created in such a way that fields across different types have the same MappedFieldType. See the new Mapper.updateFieldType API which replaces MappedFieldTypeReference. DocumentMapper is now immutable and MapperService.merge has been refactored in such a way that if an exception is thrown while eg. lookup structures are being updated, then the whole mapping update will be aborted. As a consequence, FieldTypeLookup's checkCompatibility has been folded into copyAndAddAll. Synchronization was simplified: given that mappings are truly immutable, we don't need the read/write lock so that no documents can be parsed while a mapping update is being processed. Document parsing is not performed under a lock anymore, and mapping merging uses a simple synchronized block. 
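The core of the refactoring is a copy-on-write discipline: build and validate a complete new mapping off to the side, and only publish it once nothing can fail anymore. A toy sketch of that discipline follows; the types and the string-based "mapping" are illustrative stand-ins, the real classes are Mapping, DocumentMapper and MapperService in the diff below:

```
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class CopyOnWritePublishSketch {
    // Readers see 'published' without locking; it only ever points at immutable maps.
    private volatile Map<String, String> published = Collections.emptyMap();

    public Map<String, String> current() {
        return published;
    }

    // Writers serialize on the monitor; if validation throws, 'published' is untouched,
    // so the update is either applied completely or not at all.
    public synchronized void merge(String type, String incomingMapping) {
        Map<String, String> copy = new HashMap<>(published);
        String existing = copy.get(type);
        String merged = existing == null ? incomingMapping : existing + "," + incomingMapping;
        validate(merged); // may throw: the whole update is then rejected
        copy.put(type, merged);
        published = Collections.unmodifiableMap(copy); // single atomic publish
    }

    private void validate(String mapping) {
        if (mapping.isEmpty()) {
            throw new IllegalArgumentException("empty mapping");
        }
    }
}
```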
--- .../metadata/MetaDataMappingService.java | 5 +- .../index/analysis/FieldNameAnalyzer.java | 30 +--- .../index/mapper/DocumentFieldMappers.java | 68 ++++----- .../index/mapper/DocumentMapper.java | 117 +++++--------- .../index/mapper/DocumentMapperParser.java | 9 +- .../index/mapper/DocumentParser.java | 19 +-- .../index/mapper/FieldMapper.java | 69 ++++++--- .../index/mapper/FieldTypeLookup.java | 109 ++++++------- .../mapper/MappedFieldTypeReference.java | 41 ----- .../elasticsearch/index/mapper/Mapper.java | 7 + .../index/mapper/MapperBuilders.java | 5 +- .../index/mapper/MapperService.java | 141 +++++++++-------- .../elasticsearch/index/mapper/Mapping.java | 14 +- .../mapper/geo/BaseGeoPointFieldMapper.java | 27 +++- .../internal/FieldNamesFieldMapper.java | 2 +- .../mapper/internal/TimestampFieldMapper.java | 8 +- .../index/mapper/object/ObjectMapper.java | 23 +++ .../index/mapper/object/RootObjectMapper.java | 6 + .../index/engine/InternalEngineTests.java | 5 +- .../mapper/DocumentFieldMapperTests.java | 144 ++++++++++++++++++ .../index/mapper/FieldTypeLookupTests.java | 79 ++++------ .../camelcase/CamelCaseFieldNameTests.java | 1 + .../mapper/copyto/CopyToMapperTests.java | 27 ++-- .../core/TokenCountFieldMapperTests.java | 16 +- .../mapper/date/SimpleDateMappingTests.java | 7 +- .../GenericStoreDynamicTemplateTests.java | 1 + .../PathMatchDynamicTemplateTests.java | 1 + .../simple/SimpleDynamicTemplatesTests.java | 3 + .../mapper/externalvalues/ExternalMapper.java | 41 ++++- .../ExternalMetadataMapper.java | 14 +- .../mapper/geo/GeoShapeFieldMapperTests.java | 8 +- .../mapper/index/IndexTypeMapperTests.java | 29 +--- .../internal/FieldNamesFieldMapperTests.java | 12 +- .../mapper/lucene/DoubleIndexingDocTests.java | 1 + .../mapper/merge/TestMergeMapperTests.java | 35 ++--- .../mapper/multifield/MultiFieldTests.java | 8 +- .../merge/JavaMultiFieldMergeTests.java | 27 +--- .../mapper/numeric/SimpleNumericTests.java | 2 + .../mapper/simple/SimpleMapperTests.java | 13 +- .../source/DefaultSourceMappingTests.java | 4 +- .../string/SimpleStringMappingTests.java | 8 +- .../timestamp/TimestampMappingTests.java | 63 +++++--- .../index/mapper/ttl/TTLMappingTests.java | 91 +++-------- .../mapper/update/UpdateMappingTests.java | 8 +- .../mapping/UpdateMappingIntegrationIT.java | 2 +- .../all_mapping_update_with_conflicts.json | 2 +- .../index/mapper/size/SizeMappingTests.java | 7 +- 47 files changed, 694 insertions(+), 665 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 739d831597a..14f9f500c45 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -259,9 +259,8 @@ public class MetaDataMappingService extends AbstractComponent { } else { newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null); if (existingMapper != null) { - // first, simulate - // this will just throw exceptions in case of problems - existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); + // first, simulate: just call merge and ignore the result + 
existingMapper.merge(newMapper.mapping(), request.updateAllTypes()); } else { // TODO: can we find a better place for this validation? // The reason this validation is here is that the mapper service doesn't learn about diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java index 68e3c3ee450..34829cedeed 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FieldNameAnalyzer.java @@ -23,36 +23,24 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.elasticsearch.common.collect.CopyOnWriteHashMap; -import java.util.AbstractMap; import java.util.Map; -import java.util.stream.Stream; /** * */ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper { - private final CopyOnWriteHashMap analyzers; - private final Analyzer defaultAnalyzer; + private final Map analyzers; - public FieldNameAnalyzer(Analyzer defaultAnalyzer) { - this(new CopyOnWriteHashMap<>(), defaultAnalyzer); - } - - public FieldNameAnalyzer(Map analyzers, Analyzer defaultAnalyzer) { + public FieldNameAnalyzer(Map analyzers) { super(Analyzer.PER_FIELD_REUSE_STRATEGY); this.analyzers = CopyOnWriteHashMap.copyOf(analyzers); - this.defaultAnalyzer = defaultAnalyzer; } public Map analyzers() { return analyzers; } - public Analyzer defaultAnalyzer() { - return defaultAnalyzer; - } - @Override protected Analyzer getWrappedAnalyzer(String fieldName) { Analyzer analyzer = analyzers.get(fieldName); @@ -63,18 +51,4 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper { // Fields need to be explicitly added throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer"); } - - /** - * Return a new instance that contains the union of this and of the provided analyzers. 
- */ - public FieldNameAnalyzer copyAndAddAll(Stream> mappers) { - CopyOnWriteHashMap result = analyzers.copyAndPutAll(mappers.map((e) -> { - if (e.getValue() == null) { - return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer); - } - return e; - })); - return new FieldNameAnalyzer(result, defaultAnalyzer); - } - } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index e14d7a0cd63..90da570bbe6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -20,15 +20,15 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.FieldNameAnalyzer; -import java.util.AbstractMap; import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.Map; import java.util.Set; /** @@ -37,44 +37,38 @@ import java.util.Set; public final class DocumentFieldMappers implements Iterable { /** Full field name to mapper */ - private final CopyOnWriteHashMap fieldMappers; + private final Map fieldMappers; private final FieldNameAnalyzer indexAnalyzer; private final FieldNameAnalyzer searchAnalyzer; private final FieldNameAnalyzer searchQuoteAnalyzer; - public DocumentFieldMappers(AnalysisService analysisService) { - this(new CopyOnWriteHashMap(), - new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()), - new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()), - new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer())); - } - - private DocumentFieldMappers(CopyOnWriteHashMap fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) { - this.fieldMappers = fieldMappers; - this.indexAnalyzer = indexAnalyzer; - this.searchAnalyzer = searchAnalyzer; - this.searchQuoteAnalyzer = searchQuoteAnalyzer; - } - - public DocumentFieldMappers copyAndAllAll(Collection newMappers) { - CopyOnWriteHashMap map = this.fieldMappers; - for (FieldMapper fieldMapper : newMappers) { - map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper); + private static void put(Map analyzers, String key, Analyzer value, Analyzer defaultValue) { + if (value == null) { + value = defaultValue; } - FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer()) - )); - FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer()) - )); - FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer()) - )); - return new DocumentFieldMappers(map,indexAnalyzer,searchAnalyzer,searchQuoteAnalyzer); + analyzers.put(key, value); } -/** Returns the mapper for the given field */ + 
public DocumentFieldMappers(Collection mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) { + Map fieldMappers = new HashMap<>(); + Map indexAnalyzers = new HashMap<>(); + Map searchAnalyzers = new HashMap<>(); + Map searchQuoteAnalyzers = new HashMap<>(); + for (FieldMapper mapper : mappers) { + fieldMappers.put(mapper.name(), mapper); + MappedFieldType fieldType = mapper.fieldType(); + put(indexAnalyzers, fieldType.names().indexName(), fieldType.indexAnalyzer(), defaultIndex); + put(searchAnalyzers, fieldType.names().indexName(), fieldType.searchAnalyzer(), defaultSearch); + put(searchQuoteAnalyzers, fieldType.names().indexName(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); + } + this.fieldMappers = Collections.unmodifiableMap(fieldMappers); + this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers); + this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers); + this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers); + } + + /** Returns the mapper for the given field */ public FieldMapper getMapper(String field) { return fieldMappers.get(field); } @@ -112,14 +106,6 @@ public final class DocumentFieldMappers implements Iterable { return this.indexAnalyzer; } - /** - * A smart analyzer used for indexing that takes into account specific analyzers configured - * per {@link FieldMapper} with a custom default analyzer for no explicit field analyzer. - */ - public Analyzer indexAnalyzer(Analyzer defaultAnalyzer) { - return new FieldNameAnalyzer(indexAnalyzer.analyzers(), defaultAnalyzer); - } - /** * A smart analyzer used for searching that takes into account specific analyzers configured * per {@link FieldMapper}. diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 756bd486316..c2d644d393d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -24,16 +24,15 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.Version; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.IdFieldMapper; @@ -51,15 +50,12 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.locks.ReentrantReadWriteLock; import static java.util.Collections.emptyMap; @@ -72,16 +68,14 @@ public class 
DocumentMapper implements ToXContent { private Map, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>(); - private final Settings indexSettings; - private final RootObjectMapper rootObjectMapper; private Map meta = emptyMap(); private final Mapper.BuilderContext builderContext; - public Builder(Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) { - this.indexSettings = indexSettings; + public Builder(RootObjectMapper.Builder builder, MapperService mapperService) { + final Settings indexSettings = mapperService.getIndexSettings().getSettings(); this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1)); this.rootObjectMapper = builder.build(builderContext); @@ -104,9 +98,14 @@ public class DocumentMapper implements ToXContent { return this; } - public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) { + public DocumentMapper build(MapperService mapperService) { Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set"); - return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, metadataMappers, mapperService.mappingLock); + Mapping mapping = new Mapping( + mapperService.getIndexSettings().getIndexVersionCreated(), + rootObjectMapper, + metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]), + meta); + return new DocumentMapper(mapperService, mapping); } } @@ -115,38 +114,25 @@ public class DocumentMapper implements ToXContent { private final String type; private final Text typeText; - private volatile CompressedXContent mappingSource; + private final CompressedXContent mappingSource; - private volatile Mapping mapping; + private final Mapping mapping; private final DocumentParser documentParser; - private volatile DocumentFieldMappers fieldMappers; + private final DocumentFieldMappers fieldMappers; - private volatile Map objectMappers = Collections.emptyMap(); + private final Map objectMappers; - private boolean hasNestedObjects = false; + private final boolean hasNestedObjects; - private final ReleasableLock mappingWriteLock; - private final ReentrantReadWriteLock mappingLock; - - public DocumentMapper(MapperService mapperService, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, - RootObjectMapper rootObjectMapper, - Map meta, - Map, MetadataFieldMapper> metadataMappers, - ReentrantReadWriteLock mappingLock) { + public DocumentMapper(MapperService mapperService, Mapping mapping) { this.mapperService = mapperService; - this.type = rootObjectMapper.name(); + this.type = mapping.root().name(); this.typeText = new Text(this.type); - this.mapping = new Mapping( - Version.indexCreated(indexSettings), - rootObjectMapper, - metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]), - meta); - this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock())); - - this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock()); - this.mappingLock = mappingLock; + final IndexSettings indexSettings = mapperService.getIndexSettings(); + this.mapping = mapping; + this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this); if (metadataMapper(ParentFieldMapper.class).active()) { // mark the routing field mapper as required @@ -163,7 +149,11 @@ public class DocumentMapper implements ToXContent { } 
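         // collect every object and field mapper reachable from the root so the immutable lookup structures below can be built in one pass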
MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers); - this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers); + final AnalysisService analysisService = mapperService.analysisService(); + this.fieldMappers = new DocumentFieldMappers(newFieldMappers, + analysisService.defaultIndexAnalyzer(), + analysisService.defaultSearchAnalyzer(), + analysisService.defaultSearchQuoteAnalyzer()); Map builder = new HashMap<>(); for (ObjectMapper objectMapper : newObjectMappers) { @@ -173,14 +163,20 @@ public class DocumentMapper implements ToXContent { } } + boolean hasNestedObjects = false; this.objectMappers = Collections.unmodifiableMap(builder); for (ObjectMapper objectMapper : newObjectMappers) { if (objectMapper.nested().isNested()) { hasNestedObjects = true; } } + this.hasNestedObjects = hasNestedObjects; - refreshSource(); + try { + mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); + } catch (Exception e) { + throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); + } } public Mapping mapping() { @@ -334,46 +330,17 @@ public class DocumentMapper implements ToXContent { return mapperService.getParentTypes().contains(type); } - private void addMappers(Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { - assert mappingLock.isWriteLockedByCurrentThread(); - - // update mappers for this document type - Map builder = new HashMap<>(this.objectMappers); - for (ObjectMapper objectMapper : objectMappers) { - builder.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNestedObjects = true; - } - } - this.objectMappers = Collections.unmodifiableMap(builder); - this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); - - // finally update for the entire index - mapperService.addMappers(type, objectMappers, fieldMappers); + public DocumentMapper merge(Mapping mapping, boolean updateAllTypes) { + Mapping merged = this.mapping.merge(mapping, updateAllTypes); + return new DocumentMapper(mapperService, merged); } - public void merge(Mapping mapping, boolean simulate, boolean updateAllTypes) { - try (ReleasableLock lock = mappingWriteLock.acquire()) { - mapperService.checkMappersCompatibility(type, mapping, updateAllTypes); - // do the merge even if simulate == false so that we get exceptions - Mapping merged = this.mapping.merge(mapping, updateAllTypes); - if (simulate == false) { - this.mapping = merged; - Collection objectMappers = new ArrayList<>(); - Collection fieldMappers = new ArrayList<>(Arrays.asList(merged.metadataMappers)); - MapperUtils.collect(merged.root, objectMappers, fieldMappers); - addMappers(objectMappers, fieldMappers, updateAllTypes); - refreshSource(); - } - } - } - - private void refreshSource() throws ElasticsearchGenerationException { - try { - mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); - } catch (Exception e) { - throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); - } + /** + * Recursively update sub field types. 
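+     * Returns a copy of this mapper with the updated field types applied; this instance is
+     * left unmodified. For example (sketch), {@code MapperService#merge} applies the merged
+     * lookup back to every type via {@code mapper.updateFieldType(fieldTypes.fullNameToFieldType)}.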
+ */ + public DocumentMapper updateFieldType(Map fullNameToFieldType) { + Mapping updated = this.mapping.updateFieldType(fullNameToFieldType); + return new DocumentMapper(mapperService, updated); } public void close() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index d7cc5eb8c93..7bb7b0b54a3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -46,7 +45,6 @@ import static org.elasticsearch.index.mapper.MapperBuilders.doc; public class DocumentMapperParser { - private final Settings indexSettings; final MapperService mapperService; final AnalysisService analysisService; private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class); @@ -62,8 +60,7 @@ public class DocumentMapperParser { public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService, SimilarityService similarityService, MapperRegistry mapperRegistry) { - this.indexSettings = indexSettings.getSettings(); - this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings); + this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings()); this.mapperService = mapperService; this.analysisService = analysisService; this.similarityService = similarityService; @@ -110,7 +107,7 @@ public class DocumentMapperParser { Mapper.TypeParser.ParserContext parserContext = parserContext(type); // parse RootObjectMapper - DocumentMapper.Builder docBuilder = doc(indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService); + DocumentMapper.Builder docBuilder = doc((RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService); Iterator> iterator = mapping.entrySet().iterator(); // parse DocumentMapper while(iterator.hasNext()) { @@ -137,7 +134,7 @@ public class DocumentMapperParser { checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: "); - return docBuilder.build(mapperService, this); + return docBuilder.build(mapperService); } public static void checkNoRemainingFields(String fieldName, Map fieldNodeMap, Version indexVersionCreated) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index bb1749d2336..4eb3100c99c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -26,10 +26,9 @@ import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -53,29 +52,21 @@ class DocumentParser implements Closeable { private CloseableThreadLocal cache = new CloseableThreadLocal() { @Override protected ParseContext.InternalParseContext initialValue() { - return new ParseContext.InternalParseContext(indexSettings, docMapperParser, docMapper, new ContentPath(0)); + return new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0)); } }; - private final Settings indexSettings; + private final IndexSettings indexSettings; private final DocumentMapperParser docMapperParser; private final DocumentMapper docMapper; - private final ReleasableLock parseLock; - public DocumentParser(Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ReleasableLock parseLock) { + public DocumentParser(IndexSettings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper) { this.indexSettings = indexSettings; this.docMapperParser = docMapperParser; this.docMapper = docMapper; - this.parseLock = parseLock; } public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException { - try (ReleasableLock lock = parseLock.acquire()){ - return innerParseDocument(source); - } - } - - private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException { if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) { throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]"); } @@ -132,7 +123,7 @@ class DocumentParser implements Closeable { // try to parse the next token, this should be null if the object is ended properly // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch) - if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && source.parser() == null && parser != null) { // only check for end of tokens if we created the parser here token = parser.nextToken(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 93de39d0f9e..9c77a416bf1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -44,6 +44,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { @@ -267,7 +268,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } } - protected MappedFieldTypeReference fieldTypeRef; + protected MappedFieldType fieldType; protected final MappedFieldType defaultFieldType; protected MultiFields multiFields; protected CopyTo copyTo; @@ -277,7 +278,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { super(simpleName); assert indexSettings != null; this.indexCreatedBefore2x = 
Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1); - this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type + fieldType.freeze(); + this.fieldType = fieldType; defaultFieldType.freeze(); this.defaultFieldType = defaultFieldType; this.multiFields = multiFields; @@ -290,23 +292,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { } public MappedFieldType fieldType() { - return fieldTypeRef.get(); - } - - /** Returns a reference to the MappedFieldType for this mapper. */ - public MappedFieldTypeReference fieldTypeReference() { - return fieldTypeRef; - } - - /** - * Updates the reference to this field's MappedFieldType. - * Implementations should assert equality of the underlying field type - */ - public void setFieldTypeReference(MappedFieldTypeReference ref) { - if (ref.get().equals(fieldType()) == false) { - throw new IllegalStateException("Cannot overwrite field type reference to unequal reference"); - } - this.fieldTypeRef = ref; + return fieldType; } /** @@ -350,10 +336,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return false; } + @Override public Iterator iterator() { - if (multiFields == null) { - return Collections.emptyIterator(); - } return multiFields.iterator(); } @@ -389,12 +373,26 @@ public abstract class FieldMapper extends Mapper implements Cloneable { multiFields = multiFields.merge(fieldMergeWith.multiFields); // apply changeable values - MappedFieldType fieldType = fieldMergeWith.fieldType().clone(); - fieldType.freeze(); - fieldTypeRef.set(fieldType); + this.fieldType = fieldMergeWith.fieldType; this.copyTo = fieldMergeWith.copyTo; } + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.names().fullName()); + if (newFieldType == null) { + throw new IllegalStateException(); + } + MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType); + if (fieldType == newFieldType && multiFields == updatedMultiFields) { + return this; // no change + } + FieldMapper updated = clone(); + updated.fieldType = newFieldType; + updated.multiFields = updatedMultiFields; + return updated; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(simpleName()); @@ -619,6 +617,27 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return new MultiFields(mappers); } + public MultiFields updateFieldType(Map fullNameToFieldType) { + ImmutableOpenMap.Builder newMappersBuilder = null; + + for (ObjectCursor cursor : mappers.values()) { + FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType); + if (updated != cursor.value) { + if (newMappersBuilder == null) { + newMappersBuilder = ImmutableOpenMap.builder(mappers); + } + newMappersBuilder.put(updated.simpleName(), updated); + } + } + + if (newMappersBuilder == null) { + return this; + } + + ImmutableOpenMap mappers = newMappersBuilder.build(); + return new MultiFields(mappers); + } + public Iterator iterator() { return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator(); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index da21e599cc9..e06b4e799ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
@@ -37,16 +37,16 @@ import java.util.Set;
 class FieldTypeLookup implements Iterable<MappedFieldType> {
 
     /** Full field name to field type */
-    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
+    final CopyOnWriteHashMap<String, MappedFieldType> fullNameToFieldType;
 
     /** Full field name to types containing a mapping for this full name. */
-    private final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
+    final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
 
     /** Index field name to field type */
-    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;
+    final CopyOnWriteHashMap<String, MappedFieldType> indexNameToFieldType;
 
     /** Index field name to types containing a mapping for this index name. */
-    private final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
+    final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
 
     /** Create a new empty instance. */
     public FieldTypeLookup() {
@@ -57,9 +57,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
 
     private FieldTypeLookup(
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName,
+        CopyOnWriteHashMap<String, MappedFieldType> fullName,
         CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName,
+        CopyOnWriteHashMap<String, MappedFieldType> indexName,
         CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
         this.fullNameToFieldType = fullName;
         this.fullNameToTypes = fullNameToTypes;
@@ -89,43 +89,35 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
      * from the provided fields. If a field already exists, the field type will be updated
      * to use the new mappers field type.
      */
-    public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> newFieldMappers) {
+    public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
         Objects.requireNonNull(type, "type must not be null");
         if (MapperService.DEFAULT_MAPPING.equals(type)) {
             throw new IllegalArgumentException("Default mappings should not be added to the lookup");
         }
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
+
+        CopyOnWriteHashMap<String, MappedFieldType> fullName = this.fullNameToFieldType;
         CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
+        CopyOnWriteHashMap<String, MappedFieldType> indexName = this.indexNameToFieldType;
         CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;
 
-        for (FieldMapper fieldMapper : newFieldMappers) {
+        for (FieldMapper fieldMapper : fieldMappers) {
             MappedFieldType fieldType = fieldMapper.fieldType();
-            MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName());
-            MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName());
-            if (fullNameRef == null && indexNameRef == null) {
-                // new field, just use the ref from this field mapper
-                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference());
-                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference());
-            } else if (fullNameRef == null) {
-                // this index name already exists, so copy over the reference
-                fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef);
-                indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(indexNameRef);
-            } else if (indexNameRef == null) {
-                // this full name already exists, so copy over the reference
-                indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef);
-                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(fullNameRef);
-            } else if (fullNameRef == indexNameRef) {
-                // the field already exists, so replace the reference in this mapper with the pre-existing one
-                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(fullNameRef);
-            } else {
+            MappedFieldType fullNameFieldType = fullName.get(fieldType.names().fullName());
+            MappedFieldType indexNameFieldType = indexName.get(fieldType.names().indexName());
+
+            if (fullNameFieldType != null && indexNameFieldType != null && fullNameFieldType != indexNameFieldType) {
                 // this new field bridges between two existing field names (a full and index name), which we cannot support
                 throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
             }
+            // is the update even legal?
+            checkCompatibility(type, fieldMapper, updateAllTypes);
+
+            if (fieldType != fullNameFieldType || fieldType != indexNameFieldType) {
+                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldType());
+                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldType());
+            }
+
             fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
             indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
         }
@@ -145,42 +137,38 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
 
     /**
-     * Checks if the given mappers' field types are compatible with existing field types.
-     * If any are not compatible, an IllegalArgumentException is thrown.
+     * Checks if the given field type is compatible with an existing field type.
+     * An IllegalArgumentException is thrown in case of incompatibility.
      * If updateAllTypes is true, only basic compatibility is checked.
     */
-    public void checkCompatibility(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
-        for (FieldMapper fieldMapper : fieldMappers) {
-            MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
-            if (ref != null) {
-                List<String> conflicts = new ArrayList<>();
-                final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
-                boolean strict = beStrict(type, types, updateAllTypes);
-                ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-                if (conflicts.isEmpty() == false) {
-                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
-                }
+    private void checkCompatibility(String type, FieldMapper fieldMapper, boolean updateAllTypes) {
+        MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
+        if (fieldType != null) {
+            List<String> conflicts = new ArrayList<>();
+            final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
+            boolean strict = beStrict(type, types, updateAllTypes);
+            fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
+            if (conflicts.isEmpty() == false) {
+                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
             }
+        }
 
-            // field type for the index name must be compatible too
-            MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
-            if (indexNameRef != null) {
-                List<String> conflicts = new ArrayList<>();
-                final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
-                boolean strict = beStrict(type, types, updateAllTypes);
-                indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-                if (conflicts.isEmpty() == false) {
-                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
-                }
+        // field type for the index name must be compatible too
+        fieldType = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
+        if (fieldType != null) {
+            List<String> conflicts = new ArrayList<>();
+            final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
+            boolean strict = beStrict(type, types, updateAllTypes);
+            fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
+            if (conflicts.isEmpty() == false) {
+                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
             }
         }
     }
 
     /** Returns the field for the given field */
     public MappedFieldType get(String field) {
-        MappedFieldTypeReference ref = fullNameToFieldType.get(field);
-        if (ref == null) return null;
-        return ref.get();
+        return fullNameToFieldType.get(field);
     }
 
     /** Get the set of types that have a mapping for the given field. */
@@ -194,9 +182,7 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
 
     /** Returns the field type for the given index name */
     public MappedFieldType getByIndexName(String field) {
-        MappedFieldTypeReference ref = indexNameToFieldType.get(field);
-        if (ref == null) return null;
-        return ref.get();
+        return indexNameToFieldType.get(field);
     }
 
     /** Get the set of types that have a mapping for the given field. */
@@ -238,7 +224,8 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         return fields;
     }
 
+    @Override
     public Iterator<MappedFieldType> iterator() {
-        return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator();
+        return fullNameToFieldType.values().iterator();
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java
deleted file mode 100644
index 1a9d0b70b37..00000000000
--- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.mapper;
-
-/**
- * A container for a {@link MappedFieldType} which can be updated and is reference counted.
- */ -public class MappedFieldTypeReference { - private MappedFieldType fieldType; // the current field type this reference points to - - public MappedFieldTypeReference(MappedFieldType fieldType) { - fieldType.freeze(); // ensure frozen - this.fieldType = fieldType; - } - - public MappedFieldType get() { - return fieldType; - } - - public void set(MappedFieldType fieldType) { - fieldType.freeze(); // ensure frozen - this.fieldType = fieldType; - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 4c3aa3c56bb..ffdae90c436 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -177,4 +177,11 @@ public abstract class Mapper implements ToXContent, Iterable { /** Return the merge of {@code mergeWith} into this. * Both {@code this} and {@code mergeWith} will be left unmodified. */ public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes); + + /** + * Update the field type of this mapper. This is necessary because some mapping updates + * can modify mappings across several types. This method must return a copy of the mapper + * so that the current mapper is not modified. + */ + public abstract Mapper updateFieldType(Map fullNameToFieldType); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java index 75d2cb43937..9ea9e99f01b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; import org.elasticsearch.index.mapper.core.ByteFieldMapper; @@ -41,8 +40,8 @@ public final class MapperBuilders { private MapperBuilders() {} - public static DocumentMapper.Builder doc(Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) { - return new DocumentMapper.Builder(settings, objectBuilder, mapperService); + public static DocumentMapper.Builder doc(RootObjectMapper.Builder objectBuilder, MapperService mapperService) { + return new DocumentMapper.Builder(objectBuilder, mapperService); } public static RootObjectMapper.Builder rootObject(String name) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index c3622b4a5b8..afaaca1b1ca 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -35,11 +35,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -65,7 +63,6 @@ import java.util.List; import 
java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Function; import java.util.stream.Collectors; @@ -98,12 +95,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private volatile Map mappers = emptyMap(); - // A lock for mappings: modifications (put mapping) need to be performed - // under the write lock and read operations (document parsing) need to be - // performed under the read lock - final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock(); - private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock()); - private volatile FieldTypeLookup fieldTypes; private volatile Map fullPathObjectMappers = new HashMap<>(); private boolean hasNested = false; // updated dynamically to true when a nested object is added @@ -216,7 +207,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { DocumentMapper mapper = documentParser.parse(type, mappingSource); // still add it as a document mapper so we have it registered and, for example, persisted back into // the cluster meta data if needed, or checked for existence - try (ReleasableLock lock = mappingWriteLock.acquire()) { + synchronized (this) { mappers = newMapBuilder(mappers).put(type, mapper).map(); } try { @@ -226,7 +217,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } return mapper; } else { - try (ReleasableLock lock = mappingWriteLock.acquire()) { + synchronized (this) { // only apply the default mapping if we don't have the type yet applyDefault &= mappers.containsKey(type) == false; return merge(parse(type, mappingSource, applyDefault), updateAllTypes); @@ -234,9 +225,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } - // never expose this to the outside world, we need to reparse the doc mapper so we get fresh - // instances of field mappers to properly remove existing doc mapper - private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { + private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { if (mapper.type().length() == 0) { throw new InvalidTypeNameException("mapping type name is empty"); } @@ -262,34 +251,89 @@ public class MapperService extends AbstractIndexComponent implements Closeable { logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type()); } } - // we can add new field/object mappers while the old ones are there - // since we get new instances of those, and when we remove, we remove - // by instance equality + + // 1. compute the merged DocumentMapper DocumentMapper oldMapper = mappers.get(mapper.type()); - + DocumentMapper newMapper; if (oldMapper != null) { - oldMapper.merge(mapper.mapping(), false, updateAllTypes); - return oldMapper; + newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes); } else { - Tuple, Collection> newMappers = checkMappersCompatibility( - mapper.type(), mapper.mapping(), updateAllTypes); - Collection newObjectMappers = newMappers.v1(); - Collection newFieldMappers = newMappers.v2(); - addMappers(mapper.type(), newObjectMappers, newFieldMappers); + newMapper = mapper; + } + // 2. 
check basic sanity of the new mapping + List objectMappers = new ArrayList<>(); + List fieldMappers = new ArrayList<>(); + Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); + MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); + checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers); + checkObjectsCompatibility(newMapper.type(), objectMappers, fieldMappers, updateAllTypes); + + // 3. update lookup data-structures + // this will in particular make sure that the merged fields are compatible with other types + FieldTypeLookup fieldTypes = this.fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes); + + boolean hasNested = this.hasNested; + Map fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); + for (ObjectMapper objectMapper : objectMappers) { + fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); + if (objectMapper.nested().isNested()) { + hasNested = true; + } + } + fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + Set parentTypes = this.parentTypes; + if (oldMapper == null && newMapper.parentFieldMapper().active()) { + parentTypes = new HashSet<>(parentTypes.size() + 1); + parentTypes.addAll(this.parentTypes); + parentTypes.add(mapper.parentFieldMapper().type()); + parentTypes = Collections.unmodifiableSet(parentTypes); + } + + Map mappers = new HashMap<>(this.mappers); + mappers.put(newMapper.type(), newMapper); + for (Map.Entry entry : mappers.entrySet()) { + if (entry.getKey().equals(DEFAULT_MAPPING)) { + continue; + } + DocumentMapper m = entry.getValue(); + // apply changes to the field types back + m = m.updateFieldType(fieldTypes.fullNameToFieldType); + entry.setValue(m); + } + mappers = Collections.unmodifiableMap(mappers); + + // 4. commit the change + this.mappers = mappers; + this.fieldTypes = fieldTypes; + this.hasNested = hasNested; + this.fullPathObjectMappers = fullPathObjectMappers; + this.parentTypes = parentTypes; + + // 5. 
send notifications about the change + if (oldMapper == null) { + // means the mapping was created for (DocumentTypeListener typeListener : typeListeners) { typeListener.beforeCreate(mapper); } - mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map(); - if (mapper.parentFieldMapper().active()) { - Set newParentTypes = new HashSet<>(parentTypes.size() + 1); - newParentTypes.addAll(parentTypes); - newParentTypes.add(mapper.parentFieldMapper().type()); - parentTypes = unmodifiableSet(newParentTypes); - } - assert assertSerialization(mapper); - return mapper; } + + assert assertSerialization(newMapper); + assert assertMappersShareSameFieldType(); + + return newMapper; + } + + private boolean assertMappersShareSameFieldType() { + for (DocumentMapper mapper : docMappers(false)) { + List fieldMappers = new ArrayList<>(); + Collections.addAll(fieldMappers, mapper.mapping().metadataMappers); + MapperUtils.collect(mapper.root(), new ArrayList(), fieldMappers); + for (FieldMapper fieldMapper : fieldMappers) { + assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name(); + } + } + return true; } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { @@ -339,8 +383,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } - protected void checkMappersCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { - assert mappingLock.isWriteLockedByCurrentThread(); + private void checkObjectsCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { + assert Thread.holdsLock(this); checkFieldUniqueness(type, objectMappers, fieldMappers); @@ -358,31 +402,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); } } - - fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes); - } - - protected Tuple, Collection> checkMappersCompatibility( - String type, Mapping mapping, boolean updateAllTypes) { - List objectMappers = new ArrayList<>(); - List fieldMappers = new ArrayList<>(); - Collections.addAll(fieldMappers, mapping.metadataMappers); - MapperUtils.collect(mapping.root, objectMappers, fieldMappers); - checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes); - return new Tuple<>(objectMappers, fieldMappers); - } - - protected void addMappers(String type, Collection objectMappers, Collection fieldMappers) { - assert mappingLock.isWriteLockedByCurrentThread(); - Map fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); - for (ObjectMapper objectMapper : objectMappers) { - fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNested = true; - } - } - this.fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); - this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers); } public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java index d33a97a4151..25ea4b7d398 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -93,7 +93,7 @@ public final class Mapping implements ToXContent { return (T) metadataMappersMap.get(clazz); } - /** @see DocumentMapper#merge(Mapping, boolean, boolean) */ + /** @see DocumentMapper#merge(Mapping, boolean) */ public Mapping merge(Mapping mergeWith, boolean updateAllTypes) { RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes); Map, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap); @@ -110,6 +110,18 @@ public final class Mapping implements ToXContent { return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta); } + /** + * Recursively update sub field types. + */ + public Mapping updateFieldType(Map fullNameToFieldType) { + final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length); + for (int i = 0; i < updatedMeta.length; ++i) { + updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType); + } + RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType); + return new Mapping(indexCreated, updatedRoot, updatedMeta, meta); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { root.toXContent(builder, params, new ToXContent() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 6f413683d63..79261ec3809 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -346,11 +346,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } } - protected final DoubleFieldMapper latMapper; + protected DoubleFieldMapper latMapper; - protected final DoubleFieldMapper lonMapper; + protected DoubleFieldMapper lonMapper; - protected final StringFieldMapper geoHashMapper; + protected StringFieldMapper geoHashMapper; protected Explicit ignoreMalformed; @@ -504,4 +504,25 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } + + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); + StringFieldMapper geoUpdated = geoHashMapper == null ? null : (StringFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType); + DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType); + DoubleFieldMapper lonUpdated = lonMapper == null ? 
null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType); + if (updated == this + && geoUpdated == geoHashMapper + && latUpdated == latMapper + && lonUpdated == lonMapper) { + return this; + } + if (updated == this) { + updated = (BaseGeoPointFieldMapper) updated.clone(); + } + updated.geoHashMapper = geoUpdated; + updated.latMapper = latUpdated; + updated.lonMapper = lonUpdated; + return updated; + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index e03439f3f54..d8d61f4bab3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -216,7 +216,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { FieldNamesFieldType newFieldType = fieldType().clone(); newFieldType.setEnabled(false); newFieldType.freeze(); - fieldTypeRef.set(newFieldType); + this.fieldType = newFieldType; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index e82ecf0097a..9fdb9b586e4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -95,8 +95,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper { private boolean explicitStore = false; private Boolean ignoreMissing = null; - public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); + public Builder(MappedFieldType existing, Settings settings) { + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, chooseFieldType(settings, null)); if (existing != null) { // if there is an existing type, always use that store value (only matters for < 2.0) explicitStore = true; @@ -167,7 +167,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); + Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings()); if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { parseField(builder, builder.name, node, parserContext); } @@ -260,7 +260,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { private final Boolean ignoreMissing; private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); + this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null).clone(), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); } private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path, diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 519ac0f5e24..9f3b503ab49 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -493,6 +494,28 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } } + @Override + public ObjectMapper updateFieldType(Map fullNameToFieldType) { + List updatedMappers = null; + for (Mapper mapper : this) { + Mapper updated = mapper.updateFieldType(fullNameToFieldType); + if (mapper != updated) { + if (updatedMappers == null) { + updatedMappers = new ArrayList<>(); + } + updatedMappers.add(updated); + } + } + if (updatedMappers == null) { + return this; + } + ObjectMapper updated = clone(); + for (Mapper updatedMapper : updatedMappers) { + updated.putMapper(updatedMapper); + } + return updated; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { toXContent(builder, params, null); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 90030d40a05..8183a2179a2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ 
-27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -295,6 +296,11 @@ public class RootObjectMapper extends ObjectMapper { this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); } + @Override + public RootObjectMapper updateFieldType(Map fullNameToFieldType) { + return (RootObjectMapper) super.updateFieldType(fullNameToFieldType); + } + @Override protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { if (dynamicDateTimeFormatters != Defaults.DYNAMIC_DATE_TIME_FORMATTERS) { diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 13fce88e2c1..889cf74a7b4 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1931,9 +1931,8 @@ public class InternalEngineTests extends ESTestCase { SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry); - DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService); - DocumentMapperParser parser = mapperService.documentMapperParser(); - this.docMapper = b.build(mapperService, parser); + DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService); + this.docMapper = b.build(mapperService); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java new file mode 100644 index 00000000000..90976dac4b1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.document.Field; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.analysis.NamedAnalyzer; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Arrays; +import java.util.List; + +public class DocumentFieldMapperTests extends LuceneTestCase { + + private static class FakeAnalyzer extends Analyzer { + + private final String output; + + public FakeAnalyzer(String output) { + this.output = output; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer tokenizer = new Tokenizer() { + boolean incremented = false; + CharTermAttribute term = addAttribute(CharTermAttribute.class); + + @Override + public boolean incrementToken() throws IOException { + if (incremented) { + return false; + } + term.setLength(0).append(output); + incremented = true; + return true; + } + }; + return new TokenStreamComponents(tokenizer); + } + + } + + static class FakeFieldType extends MappedFieldType { + + public FakeFieldType() { + super(); + } + + FakeFieldType(FakeFieldType other) { + super(other); + } + + @Override + public MappedFieldType clone() { + return new FakeFieldType(this); + } + + @Override + public String typeName() { + return "fake"; + } + + } + + static class FakeFieldMapper extends FieldMapper { + + private static final Settings SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + + public FakeFieldMapper(String simpleName, MappedFieldType fieldType) { + super(simpleName, fieldType.clone(), fieldType.clone(), SETTINGS, null, null); + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + } + + @Override + protected String contentType() { + return null; + } + + } + + public void testAnalyzers() throws IOException { + FakeFieldType fieldType1 = new FakeFieldType(); + fieldType1.setNames(new MappedFieldType.Names("field1")); + fieldType1.setIndexAnalyzer(new NamedAnalyzer("foo", new FakeAnalyzer("index"))); + fieldType1.setSearchAnalyzer(new NamedAnalyzer("bar", new FakeAnalyzer("search"))); + fieldType1.setSearchQuoteAnalyzer(new NamedAnalyzer("baz", new FakeAnalyzer("search_quote"))); + FieldMapper fieldMapper1 = new FakeFieldMapper("field1", fieldType1); + + FakeFieldType fieldType2 = new FakeFieldType(); + fieldType2.setNames(new MappedFieldType.Names("field2")); + FieldMapper fieldMapper2 = new FakeFieldMapper("field2", fieldType2); + + Analyzer defaultIndex = new FakeAnalyzer("default_index"); + Analyzer defaultSearch = new FakeAnalyzer("default_search"); + Analyzer defaultSearchQuote = new FakeAnalyzer("default_search_quote"); + + DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(Arrays.asList(fieldMapper1, fieldMapper2), defaultIndex, defaultSearch, defaultSearchQuote); + + assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index"); + assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field1", "search"); + assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field1", "search_quote"); + + 
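+        // field2 defines no analyzers of its own, so each lookup should fall back to the corresponding default analyzer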
assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field2", "default_index"); + assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field2", "default_search"); + assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field2", "default_search_quote"); + } + + private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException { + try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) { + CharTermAttribute term = tok.addAttribute(CharTermAttribute.class); + assertTrue(tok.incrementToken()); + assertEquals(output, term.toString()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 5a31618f14e..8452c836041 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -31,6 +31,8 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import static org.hamcrest.Matchers.containsString; + public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { @@ -53,7 +55,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testDefaultMapping() { FieldTypeLookup lookup = new FieldTypeLookup(); try { - lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList()); + lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList(), randomBoolean()); fail(); } catch (IllegalArgumentException expected) { assertEquals("Default mappings should not be added to the lookup", expected.getMessage()); @@ -63,7 +65,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); FakeFieldMapper f = new FakeFieldMapper("foo", "bar"); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean()); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); assertNull(lookup.getByIndexName("foo")); @@ -85,94 +87,77 @@ public class FieldTypeLookupTests extends ESTestCase { public void testAddExistingField() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); - MappedFieldType originalFieldType = f.fieldType(); FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); - assertNotSame(originalFieldType, f.fieldType()); - assertSame(f.fieldType(), f2.fieldType()); - assertSame(f.fieldType(), lookup2.get("foo")); - assertSame(f.fieldType(), lookup2.getByIndexName("foo")); + assertSame(f2.fieldType(), lookup2.get("foo")); + assertSame(f2.fieldType(), lookup2.getByIndexName("foo")); assertEquals(1, size(lookup2.iterator())); } public void testAddExistingIndexName() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo"); - MappedFieldType originalFieldType = f.fieldType(); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); + lookup = lookup.copyAndAddAll("type1", 
newList(f), randomBoolean()); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); - assertNotSame(originalFieldType, f.fieldType()); - assertSame(f.fieldType(), f2.fieldType()); assertSame(f.fieldType(), lookup2.get("foo")); - assertSame(f.fieldType(), lookup2.get("bar")); - assertSame(f.fieldType(), lookup2.getByIndexName("foo")); + assertSame(f2.fieldType(), lookup2.get("bar")); + assertSame(f2.fieldType(), lookup2.getByIndexName("foo")); assertEquals(2, size(lookup2.iterator())); } public void testAddExistingFullName() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar"); - MappedFieldType originalFieldType = f.fieldType(); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); - - assertNotSame(originalFieldType, f.fieldType()); - assertSame(f.fieldType(), f2.fieldType()); - assertSame(f.fieldType(), lookup2.get("foo")); - assertSame(f.fieldType(), lookup2.getByIndexName("foo")); - assertSame(f.fieldType(), lookup2.getByIndexName("bar")); - assertEquals(1, size(lookup2.iterator())); + try { + lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]")); + } } public void testAddExistingBridgeName() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f, f2)); + lookup = lookup.copyAndAddAll("type1", newList(f, f2), randomBoolean()); try { FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar"); - lookup.copyAndAddAll("type2", newList(f3)); + lookup.copyAndAddAll("type2", newList(f3), randomBoolean()); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } try { FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo"); - lookup.copyAndAddAll("type2", newList(f3)); + lookup.copyAndAddAll("type2", newList(f3), randomBoolean()); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } } - public void testCheckCompatibilityNewField() { - FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar"); - FieldTypeLookup lookup = new FieldTypeLookup(); - lookup.checkCompatibility("type", newList(f1), false); - } - public void testCheckCompatibilityMismatchedTypes() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo"); FieldMapper f2 = new FakeFieldMapper("foo", ft2); try { - lookup.checkCompatibility("type2", newList(f2), false); + lookup.copyAndAddAll("type2", newList(f2), false); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } // fails even if updateAllTypes == true try { - lookup.checkCompatibility("type2", newList(f2), true); + lookup.copyAndAddAll("type2", newList(f2), true); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); @@ -182,33 +167,33 @@ public 
class FieldTypeLookupTests extends ESTestCase { public void testCheckCompatibilityConflict() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar"); ft2.setBoost(2.0f); FieldMapper f2 = new FakeFieldMapper("foo", ft2); try { // different type - lookup.checkCompatibility("type2", newList(f2), false); + lookup.copyAndAddAll("type2", newList(f2), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("to update [boost] across all types")); } - lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types - lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing + lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types + lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing // now with a non changeable setting MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar"); ft3.setStored(true); FieldMapper f3 = new FakeFieldMapper("foo", ft3); try { - lookup.checkCompatibility("type2", newList(f3), false); + lookup.copyAndAddAll("type2", newList(f3), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } // even with updateAllTypes == true, incompatible try { - lookup.checkCompatibility("type2", newList(f3), true); + lookup.copyAndAddAll("type2", newList(f3), true); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); @@ -219,7 +204,7 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1, f2)); + lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); Collection names = lookup.simpleMatchToIndexNames("b*"); assertTrue(names.contains("baz")); assertTrue(names.contains("boo")); @@ -229,7 +214,7 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1, f2)); + lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); Collection names = lookup.simpleMatchToFullName("b*"); assertTrue(names.contains("foo")); assertTrue(names.contains("bar")); @@ -238,7 +223,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testIteratorImmutable() { FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); try { Iterator itr = lookup.iterator(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java index 4fe0f9b77b4..ea142d6f441 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java @@ -44,6 +44,7 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + documentMapper = index.mapperService().documentMapper("type"); assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase")); assertNull(documentMapper.mappers().getMapper("this_is_camel_case")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index 149d0be305f..daf54d501d7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; @@ -39,6 +40,7 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -127,6 +129,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { assertNotNull(parsedDoc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type1").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("type1"); fieldMapper = docMapper.mappers().getMapper("new_field"); assertThat(fieldMapper, instanceOf(LongFieldMapper.class)); } @@ -308,27 +311,15 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapperBefore = parser.parse("type1", new CompressedXContent(mappingBefore)); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), true, false); - List fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields(); + assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); - assertThat(fields.size(), equalTo(2)); - assertThat(fields.get(0), equalTo("foo")); - assertThat(fields.get(1), equalTo("bar")); + DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), false, false); - - DocumentMapper docMapperAfter = parser.parse("type1", new CompressedXContent(mappingAfter)); - - docMapperBefore.merge(docMapperAfter.mapping(), true, false); - - docMapperBefore.merge(docMapperAfter.mapping(), false, false); - - fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields(); - - assertThat(fields.size(), equalTo(2)); - 
assertThat(fields.get(0), equalTo("baz")); - assertThat(fields.get(1), equalTo("bar")); + assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields()); + assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); } public void testCopyToNestedField() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index 25a9adf7125..a746717b73a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -50,8 +51,8 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping)); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), true, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject() .startObject("person") @@ -62,15 +63,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping)); + DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), false, false); - stage1.merge(stage2.mapping(), true, false); - // Just simulated so merge hasn't happened yet + // previous mapper has not been modified assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); - - stage1.merge(stage2.mapping(), false, false); - // Just simulated so merge hasn't happened yet - assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); + // but the new one has the change + assertThat(((TokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); } public void testCountPositions() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index a4009c8a861..091c1ca2801 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -80,7 +80,9 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .startObject("properties").endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = mapper("test", "type", mapping); + IndexService index = createIndex("test"); + 
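// Note the pattern introduced here and repeated throughout these tests: merging
// a mapping now produces a new DocumentMapper rather than mutating the old one,
// so after putting a mapping the test must re-resolve the current mapper, as
// the two added lines that follow do:
//
//     client().admin().indices().preparePutMapping("test")
//             .setType("type").setSource(mapping).get();
//     DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
//
// A mapper obtained before the update would still reflect the old mapping.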
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); + DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -94,6 +96,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); DateFieldMapper dateFieldMapper = (DateFieldMapper)fieldMapper; @@ -384,7 +387,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { Map config = getConfigurationViaXContent(initialDateFieldMapper); assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); - defaultMapper.merge(mergeMapper.mapping(), false, false); + defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false); assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class))); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java index d07e6177814..da5c53f46f9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java @@ -44,6 +44,7 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java index 829730e68cd..75dd396d8dd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java @@ -44,6 +44,7 @@ public class PathMatchDynamicTemplateTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java index 014f0295808..250b7a8d28a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java @@ -55,6 +55,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", builder.bytes()); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); DocumentFieldMappers mappers = docMapper.mappers(); assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue()); @@ -74,6 +75,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -130,6 +132,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index dc6c720402e..c4b04000eb6 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy; @@ -160,11 +161,11 @@ public class ExternalMapper extends FieldMapper { private final String generatedValue; private final String mapperName; - private final BinaryFieldMapper binMapper; - private final BooleanFieldMapper boolMapper; - private final BaseGeoPointFieldMapper pointMapper; - private final GeoShapeFieldMapper shapeMapper; - private final FieldMapper stringMapper; + private BinaryFieldMapper binMapper; + private BooleanFieldMapper boolMapper; + private BaseGeoPointFieldMapper pointMapper; + private 
GeoShapeFieldMapper shapeMapper; + private FieldMapper stringMapper; public ExternalMapper(String simpleName, MappedFieldType fieldType, String generatedValue, String mapperName, @@ -216,6 +217,36 @@ public class ExternalMapper extends FieldMapper { // ignore this for now } + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType); + MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType); + BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType); + BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType); + GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); + GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); + StringFieldMapper stringMapperUpdate = (StringFieldMapper) stringMapper.updateFieldType(fullNameToFieldType); + if (update == this + && multiFieldsUpdate == multiFields + && binMapperUpdate == binMapper + && boolMapperUpdate == boolMapper + && pointMapperUpdate == pointMapper + && shapeMapperUpdate == shapeMapper + && stringMapperUpdate == stringMapper) { + return this; + } + if (update == this) { + update = (ExternalMapper) clone(); + } + update.multiFields = multiFieldsUpdate; + update.binMapper = binMapperUpdate; + update.boolMapper = boolMapperUpdate; + update.pointMapper = pointMapperUpdate; + update.shapeMapper = shapeMapperUpdate; + update.stringMapper = stringMapperUpdate; + return update; + } + @Override public Iterator iterator() { return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java index 7797762606a..6ff0a428f79 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java @@ -53,23 +53,11 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE, indexSettings); } - @Override - public String name() { - return CONTENT_TYPE; - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { // handled in post parse } - @Override - public void doMerge(Mapper mergeWith, boolean updateAllTypes) { - if (!(mergeWith instanceof ExternalMetadataMapper)) { - throw new IllegalArgumentException("Trying to merge " + mergeWith + " with " + this); - } - } - @Override public Iterator iterator() { return Collections.emptyIterator(); @@ -97,7 +85,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { protected Builder() { - super(CONTENT_TYPE, FIELD_TYPE, FIELD_TYPE); + super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index 380a1e04ad3..76dba99e05b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -376,7 +376,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw") .endObject().endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26) @@ -392,7 +392,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { } // verify nothing changed - FieldMapper fieldMapper = stage1.mappers().getMapper("shape"); + FieldMapper fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -408,9 +408,9 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); - fieldMapper = stage1.mappers().getMapper("shape"); + fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java index 70a714b29ec..d2065f439cb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -94,33 +94,16 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser(); - DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(mappingWithIndexEnabled)); - + 
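// The conversions below replace DocumentMapperParser.parse, which only builds
// a mapper, with MapperService.merge, which registers the mapping and returns
// the merged DocumentMapper. Judging from its call sites in this patch, the
// two trailing booleans are an apply-default-mapping flag and updateAllTypes
// (an assumption from usage, not a documented signature). Sketch using this
// test's own names:
//
//     DocumentMapper mapperEnabled =
//             mapperService.merge("type", new CompressedXContent(mappingWithIndexEnabled), true, false);
//     DocumentMapper merged =
//             mapperService.merge("type", new CompressedXContent(mappingWithIndexDisabled), false, false);
//     // mapperEnabled is unchanged; only "merged" reflects the disabled _index field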
MapperService mapperService = createIndex("test", bwcSettings).mapperService(); + DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(mappingWithIndexEnabled), true, false); + assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(true)); String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(mappingWithIndexDisabled)); + DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mappingWithIndexDisabled), false, false); - mapperEnabled.merge(mapperDisabled.mapping(), false, false); - assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false)); - } - - public void testThatDisablingWorksWhenMergingBackcompat() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser(); - DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); - - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); - - enabledMapper.merge(disabledMapper.mapping(), false, false); - assertThat(enabledMapper.indexMapper().enabled(), is(false)); + assertThat(merged.IndexFieldMapper().enabled(), is(false)); } public void testCustomSettingsBackcompat() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index d6c2cbf2e9f..e05bc21ba89 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -187,15 +187,13 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping)); - DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(disabledMapping)); - mapperEnabled.merge(mapperDisabled.mapping(), false, false); - assertFalse(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); + DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); + DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + assertFalse(mapperDisabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping)); - mapperDisabled.merge(mapperEnabled.mapping(), false, false); + mapperEnabled = 
mapperService.merge("type", new CompressedXContent(enabledMapping), false, false); assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java index 656599c5036..d171430dfff 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java @@ -59,6 +59,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { .bytes()); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + mapper = index.mapperService().documentMapper("type"); writer.addDocument(doc.rootDoc()); writer.addDocument(doc.rootDoc()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index d86a93e3f1b..80f7942bbcc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -59,15 +59,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping)); - stage1.merge(stage2.mapping(), true, false); - // since we are simulating, we should not have the age mapping + DocumentMapper merged = stage1.merge(stage2.mapping(), false); + // stage1 mapping should not have been modified assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue()); assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue()); - // now merge, don't simulate - stage1.merge(stage2.mapping(), false, false); - // but we have the age in - assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue()); - assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue()); + // but merged should + assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue()); + assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue()); } public void testMergeObjectDynamic() throws Exception { @@ -80,8 +78,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping)); assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); - mapper.merge(withDynamicMapper.mapping(), false, false); - assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); + DocumentMapper merged = mapper.merge(withDynamicMapper.mapping(), false); + assertThat(merged.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); } public void testMergeObjectAndNested() throws Exception { @@ -96,14 +94,14 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping)); try { - objectMapper.merge(nestedMapper.mapping(), true, false); + objectMapper.merge(nestedMapper.mapping(), false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from non-nested to nested")); } try { - 
nestedMapper.merge(objectMapper.mapping(), true, false); + nestedMapper.merge(objectMapper.mapping(), false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from nested to non-nested")); @@ -123,13 +121,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - existing.merge(changed.mapping(), false, false); + DocumentMapper merged = existing.merge(changed.mapping(), false); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); + assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); } public void testChangeSearchAnalyzerToDefault() throws Exception { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test").mapperService(); String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() .endObject().endObject().string(); @@ -137,14 +135,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject() .endObject().endObject().string(); - DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); - DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); + DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), true, false); + DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), false, false); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - existing.merge(changed.mapping(), false, false); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); - assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14)); + assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); + assertThat(((StringFieldMapper) (merged.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14)); } public void testConcurrentMergeTest() throws Throwable { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 8a66a78f3ac..3c301e93fa5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -150,19 +150,17 @@ public class MultiFieldTests extends ESSingleNodeTestCase { public void testBuildThenParse() throws Exception { IndexService indexService = createIndex("test"); - Settings settings = indexService.getIndexSettings().getSettings(); - DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - DocumentMapper 
builderDocMapper = doc(settings, rootObject("person").add( + DocumentMapper builderDocMapper = doc(rootObject("person").add( stringField("name").store(true) .addMultiField(stringField("indexed").index(true).tokenized(true)) .addMultiField(stringField("not_indexed").index(false).store(true)) - ), indexService.mapperService()).build(indexService.mapperService(), mapperParser); + ), indexService.mapperService()).build(indexService.mapperService()); String builtMapping = builderDocMapper.mappingSource().string(); // System.out.println(builtMapping); // reparse it - DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); + DocumentMapper docMapper = indexService.mapperService().documentMapperParser().parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 1a999a64018..651b8c45d55 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -41,9 +40,9 @@ import static org.hamcrest.Matchers.nullValue; public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { public void testMergeMultiField() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -56,11 +55,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - DocumentMapper docMapper2 = parser.parse("person", new CompressedXContent(mapping)); - - docMapper.merge(docMapper2.mapping(), true, false); - - docMapper.merge(docMapper2.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -77,11 +72,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - DocumentMapper docMapper3 = 
parser.parse("person", new CompressedXContent(mapping)); - - docMapper.merge(docMapper3.mapping(), true, false); - - docMapper.merge(docMapper3.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -92,11 +83,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - DocumentMapper docMapper4 = parser.parse("person", new CompressedXContent(mapping)); - - docMapper.merge(docMapper4.mapping(), true, false); - - docMapper.merge(docMapper4.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -125,7 +112,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -142,7 +129,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index 14b6e7c7110..624978bf7d0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -74,6 +74,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); assertThat(mapper, instanceOf(LongFieldMapper.class)); @@ -98,6 +99,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get()); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); assertThat(mapper, instanceOf(StringFieldMapper.class)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java index b04d3a64a25..230af8d3a50 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.simple; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -48,12 +47,10 @@ import static org.hamcrest.Matchers.equalTo; public class SimpleMapperTests extends ESSingleNodeTestCase { public void testSimpleMapper() throws Exception { IndexService indexService = createIndex("test"); - Settings settings = indexService.getIndexSettings().getSettings(); - DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - DocumentMapper docMapper = doc(settings, + DocumentMapper docMapper = doc( rootObject("person") .add(object("name").add(stringField("first").store(true).index(false))), - indexService.mapperService()).build(indexService.mapperService(), mapperParser); + indexService.mapperService()).build(indexService.mapperService()); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); @@ -110,12 +107,10 @@ public class SimpleMapperTests extends ESSingleNodeTestCase { public void testNoDocumentSent() throws Exception { IndexService indexService = createIndex("test"); - Settings settings = indexService.getIndexSettings().getSettings(); - DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - DocumentMapper docMapper = doc(settings, + DocumentMapper docMapper = doc( rootObject("person") .add(object("name").add(stringField("first").store(true).index(false))), - indexService.mapperService()).build(indexService.mapperService(), mapperParser); + indexService.mapperService()).build(indexService.mapperService()); BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8)); try { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index ea97aa6b8cf..35b127b6283 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -196,10 +196,10 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1)); docMapper = parser.parse("type", docMapper.mappingSource()); if (conflicts.length == 0) { - docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); + docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false); } else { try { - docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); + docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false); fail(); } catch (IllegalArgumentException e) { for (String conflict : conflicts) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index aac4b81aff2..218fc442224 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -478,7 +479,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + MapperService mapperService = indexService.mapperService(); + DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -492,7 +494,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), false, false); + defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -507,7 +509,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); try { - defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), true, false); + mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("different [omit_norms]")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 01d4c6a1234..c6a8a2fca44 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -143,17 +143,16 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); + 
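// parse() only builds a mapper; conflict checks now run when the mapping is
// merged into the MapperService, and a rejected merge throws while leaving the
// previous mapping in place. The timestamp conflict tests below therefore all
// follow this shape (exception text copied from an assertion further down):
//
//     try {
//         mapperService.merge("type", new CompressedXContent(conflictingMapping), false, false);
//         fail();
//     } catch (IllegalArgumentException e) {
//         assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values"));
//     }
//
// where conflictingMapping stands in for any source that changes a
// non-updateable _timestamp setting.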
MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); - enabledMapper.merge(disabledMapper.mapping(), false, false); - - assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false)); + assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); + assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); } // issue 3174 @@ -504,16 +503,16 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject() .endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test", indexSettings).mapperService(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() .endObject().endObject().string(); - docMapper.merge(parser.parse("type", new CompressedXContent(mapping)).mapping(), false, false); + docMapper = mapperService.merge("type", new CompressedXContent(mapping), false, false); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); } @@ -571,8 +570,8 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("fielddata").field("format", "array").endObject() .field("store", "no") .field("index", "no") - .field("path", "bar") - .field("default", "1970-01-02") + .field("path", "foo") + .field("default", "1970-01-01") .endObject() .endObject().endObject().string(); @@ -584,6 +583,24 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values")); } + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_timestamp").field("enabled", false) + .startObject("fielddata").field("format", "array").endObject() + .field("store", "yes") + .field("index", "analyzed") + .field("path", "bar") + .field("default", "1970-01-02") + .endObject() + 
.endObject().endObject().string(); + + try { + mapperService.merge("type", new CompressedXContent(mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value")); + assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value")); + } + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertTrue(docMapper.timestampFieldMapper().enabled()); @@ -650,7 +667,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { public void testBackcompatMergePaths() throws Exception { String[] possiblePathValues = {"some_path", "anotherPath", null}; - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService(); XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("_timestamp"); @@ -670,21 +687,17 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { mapping2.endObject() .endObject().endObject(); - assertConflict(mapping1.string(), mapping2.string(), parser, (path1 == path2 ? null : "Cannot update path in _timestamp value")); + assertConflict(mapperService, "type", mapping1.string(), mapping2.string(), (path1 == path2 ? null : "Cannot update path in _timestamp value")); } - void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException { - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1)); - docMapper = parser.parse("type", docMapper.mappingSource()); - if (conflict == null) { - docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); - } else { - try { - docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString(conflict)); - } + void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { + mapperService.merge("type", new CompressedXContent(mapping1), true, false); + try { + mapperService.merge("type", new CompressedXContent(mapping2), false, false); + assertNull(conflict); + } catch (IllegalArgumentException e) { + assertNotNull(conflict); + assertThat(e.getMessage(), containsString(conflict)); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index c43a7f1ddc5..10eeecf6749 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -33,8 +33,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; @@ -111,13 +111,12 @@ public class TTLMappingTests 
extends ESSingleNodeTestCase { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper mapperWithoutTtl = parser.parse("type", new CompressedXContent(mappingWithoutTtl)); - DocumentMapper mapperWithTtl = parser.parse("type", new CompressedXContent(mappingWithTtl)); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), true, false); + DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), false, false); - mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false); - - assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true)); + assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); + assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); } public void testThatChangingTTLKeepsMapperEnabled() throws Exception { @@ -135,24 +134,22 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl)); - DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(updatedMapping)); - - initialMapper.merge(updatedMapper.mapping(), true, false); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); + DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); + assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); } public void testThatDisablingTTLReportsConflict() throws Exception { String mappingWithTtl = getMappingWithTtlEnabled().string(); String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl)); - DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(mappingWithTtlDisabled)); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); try { - initialMapper.merge(updatedMapper.mapping(), true, false); + mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), false, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -190,20 +187,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), 
false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), randomBoolean(), false); } public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), randomBoolean(), false); } public void testMergeWithOnlyDefaultSet() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -214,65 +211,11 @@ public class TTLMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } - public void testThatSimulatedMergingLeavesStateUntouched() throws Exception { - //check if default ttl changed when simulate set to true - XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d"); - IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl); - CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false); - // make sure simulate flag actually worked - no mappings applied - CompressedXContent mappingAfterMerge = 
indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if enabled changed when simulate set to true - XContentBuilder mappingWithoutTtl = getMappingWithTtlDisabled(); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); - mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled(); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - // make sure simulate flag actually worked - no mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if enabled changed when simulate set to true - mappingWithoutTtl = getMappingWithTtlDisabled("6d"); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); - mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - // make sure simulate flag actually worked - no mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if switching simulate flag off works - mappingWithoutTtl = getMappingWithTtlDisabled("6d"); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - // make sure simulate flag actually worked - mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); - - client().admin().indices().prepareDelete("testindex").get(); - // check if switching simulate flag off works if nothing was applied in the beginning - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - // make sure simulate flag actually worked - mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); - - } - public void 
testIncludeInObjectBackcompat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", true).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index f73ad3e3b3f..9510c6749eb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -76,7 +76,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), false, false); // make sure mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); @@ -99,7 +99,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping try { - indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -123,14 +123,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { mapperService.merge("type", new CompressedXContent(update.string()), false, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } try { mapperService.merge("type", new CompressedXContent(update.string()), false, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper); diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 32c9d3ed621..c6e9796ab60 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -149,7 +149,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { 
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet(); fail("Expected MergeMappingException"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]")); + assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [string], merged_type [integer]")); } } diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json index 252aafefb08..bbeafa7edc7 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json @@ -2,7 +2,7 @@ "type": { "_all": { "store": false, - "enabled": false, + "enabled": true, "store_term_vectors": false, "store_term_vector_offsets": false, "store_term_vector_positions": false, diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index f9dcee1efe9..cc4eb682c8a 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -134,14 +134,13 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping)); + DocumentMapper enabledMapper = indexService.mapperService().merge("type", new CompressedXContent(enabledMapping), true, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping)); + DocumentMapper disabledMapper = indexService.mapperService().merge("type", new CompressedXContent(disabledMapping), false, false); - enabledMapper.merge(disabledMapper.mapping(), false, false); - assertThat(enabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); + assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } } From 56d2dd701e2165ed5161cc47ee6ba651b4c9470e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 23 Dec 2015 10:48:00 +0100 Subject: [PATCH 233/322] Fix SizeMappingTests failure. 
--- .../index/mapper/size/SizeMappingTests.java | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index cc4eb682c8a..403eb284f96 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -28,13 +28,18 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -43,18 +48,18 @@ import static org.hamcrest.Matchers.nullValue; public class SizeMappingTests extends ESSingleNodeTestCase { - MapperRegistry mapperRegistry; IndexService indexService; + MapperService mapperService; DocumentMapperParser parser; @Before public void before() { indexService = createIndex("test"); - mapperRegistry = new MapperRegistry( - Collections.emptyMap(), - Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry); + Map metadataMappers = new HashMap<>(); + IndicesModule indices = new IndicesModule(); + indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()); + mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry()); + parser = mapperService.documentMapperParser(); } public void testSizeEnabled() throws Exception { @@ -81,10 +86,10 @@ public class SizeMappingTests extends ESSingleNodeTestCase { Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); indexService = createIndex("test2", indexSettings); - mapperRegistry = new MapperRegistry( + MapperRegistry mapperRegistry = new MapperRegistry( Collections.emptyMap(), Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), + parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, indexService.analysisService(), indexService.similarityService(), mapperRegistry); DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); @@ -134,12 +139,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = 
indexService.mapperService().merge("type", new CompressedXContent(enabledMapping), true, false);
+        DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false);
         String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_size").field("enabled", false).endObject()
                 .endObject().endObject().string();
-        DocumentMapper disabledMapper = indexService.mapperService().merge("type", new CompressedXContent(disabledMapping), false, false);
+        DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false);

         assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false));
     }
 }

From 9cd618244af12282ce4c47d75bdb43b787c186bd Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Wed, 23 Dec 2015 10:59:38 +0100
Subject: [PATCH 234/322] Docs: Remove deprecated documentation flag in profile docs

---
 docs/reference/search/profile.asciidoc | 2 --
 1 file changed, 2 deletions(-)

diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc
index b62d83eee6b..6e11b4c7650 100644
--- a/docs/reference/search/profile.asciidoc
+++ b/docs/reference/search/profile.asciidoc
@@ -1,8 +1,6 @@
 [[search-profile]]
 == Profile API

-coming[2.2.0]
-
 experimental[]

 The Profile API provides detailed timing information about the execution of individual components

From 07658f58a82266286f4946698d4f2c4baa14d597 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Tue, 22 Dec 2015 10:04:17 +0100
Subject: [PATCH 235/322] FunctionScoreQuery should implement two-phase iteration.

FunctionScoreQuery should do two things that it doesn't do today:
- propagate the two-phase iterator from the wrapped scorer so that things are
  still executed efficiently, e.g. if a phrase or geo-distance query is wrapped
- filter out docs that don't have a high enough score using two-phase
  iteration: this way the score is only checked when everything else matches

While doing these changes, I noticed that minScore was ignored when scores were
not needed and that explain did not take it into account, so I fixed these
issues as well.
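In sketch form, the consumer side of this contract looks as follows. This is
illustrative only, not code from this patch, and the helper name
collectMatches is assumed: a caller advances the cheap approximation and runs
the expensive confirmation -- for the new MinScoreScorer below, the
score >= minScore check -- only inside matches().

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.TwoPhaseIterator;

    final class TwoPhaseSketch {
        static List<Integer> collectMatches(Scorer scorer) throws IOException {
            List<Integer> docs = new ArrayList<>();
            TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
            if (twoPhase == null) {
                // no approximation: every doc the iterator returns is a match
                DocIdSetIterator it = scorer.iterator();
                for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                    docs.add(doc);
                }
            } else {
                // the approximation may return false positives; matches()
                // confirms each candidate and is where the costly work happens
                DocIdSetIterator approximation = twoPhase.approximation();
                for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
                    if (twoPhase.matches()) {
                        docs.add(doc);
                    }
                }
            }
            return docs;
        }
    }

Compared to the removed CustomBoostFactorScorer, which re-scored every doc
during nextDoc()/advance(), pushing the min-score check into matches() lets it
compose with the wrapped query's own confirmation phase instead of defeating it.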
--- .../function/CustomBoostFactorScorer.java | 152 --------------- .../function/FiltersFunctionScoreQuery.java | 70 ++++--- .../search/function/FunctionScoreQuery.java | 42 +++-- .../search/function/MinScoreScorer.java | 95 ++++++++++ .../search/function/MinScoreScorerTests.java | 173 ++++++++++++++++++ .../FunctionScoreEquivalenceTests.java | 71 +++++++ .../functionscore/FunctionScoreTests.java | 52 ++++++ 7 files changed, 462 insertions(+), 193 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java create mode 100644 core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java create mode 100644 core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreEquivalenceTests.java diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java deleted file mode 100644 index 0e077804974..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.lucene.search.function; - -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; - -import java.io.IOException; - -abstract class CustomBoostFactorScorer extends Scorer { - - final Scorer scorer; - final DocIdSetIterator iterator; - final float maxBoost; - final CombineFunction scoreCombiner; - - Float minScore; - NextDoc nextDoc; - - CustomBoostFactorScorer(Weight w, Scorer scorer, float maxBoost, CombineFunction scoreCombiner, Float minScore) - throws IOException { - super(w); - if (minScore == null) { - nextDoc = new AnyNextDoc(); - } else { - nextDoc = new MinScoreNextDoc(); - } - this.scorer = scorer; - this.iterator = scorer.iterator(); - this.maxBoost = maxBoost; - this.scoreCombiner = scoreCombiner; - this.minScore = minScore; - } - - @Override - public int docID() { - return scorer.docID(); - } - - @Override - public DocIdSetIterator iterator() { - return new DocIdSetIterator() { - @Override - public int nextDoc() throws IOException { - return nextDoc.nextDoc(); - } - @Override - public int advance(int target) throws IOException { - return nextDoc.advance(target); - } - @Override - public long cost() { - return iterator.cost(); - } - @Override - public int docID() { - return iterator.docID(); - } - }; - } - - public abstract float innerScore() throws IOException; - - @Override - public float score() throws IOException { - return nextDoc.score(); - } - - @Override - public int freq() throws IOException { - return scorer.freq(); - } - - public interface NextDoc { - public int advance(int target) throws IOException; - - public int nextDoc() throws IOException; - - public float score() throws IOException; - } - - public class MinScoreNextDoc implements NextDoc { - float currentScore = Float.MAX_VALUE * -1.0f; - - @Override - public int nextDoc() throws IOException { - int doc; - do { - doc = iterator.nextDoc(); - if (doc == DocIdSetIterator.NO_MORE_DOCS) { - return doc; - } - currentScore = innerScore(); - } while (currentScore < minScore); - return doc; - } - - @Override - public float score() throws IOException { - return currentScore; - } - - @Override - public int advance(int target) throws IOException { - int doc = iterator.advance(target); - if (doc == DocIdSetIterator.NO_MORE_DOCS) { - return doc; - } - currentScore = innerScore(); - if (currentScore < minScore) { - return iterator.nextDoc(); - } - return doc; - } - } - - public class AnyNextDoc implements NextDoc { - - @Override - public int nextDoc() throws IOException { - return iterator.nextDoc(); - } - - @Override - public float score() throws IOException { - return innerScore(); - } - - @Override - public int advance(int target) throws IOException { - return iterator.advance(target); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 4a6ee941f4f..3486690e270 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FilterScorer; import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -142,7 +143,7 @@ public class FiltersFunctionScoreQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - if (needsScores == false) { + if (needsScores == false && minScore == null) { return subQuery.createWeight(searcher, needsScores); } @@ -184,11 +185,7 @@ public class FiltersFunctionScoreQuery extends Query { subQueryWeight.normalize(norm, boost); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - // we ignore scoreDocsInOrder parameter, because we need to score in - // order if documents are scored with a script. The - // ShardLookup depends on in order scoring. + private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { return null; @@ -201,15 +198,24 @@ public class FiltersFunctionScoreQuery extends Query { Scorer filterScorer = filterWeights[i].scorer(context); docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); } - return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore, needsScores); + return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores); + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + Scorer scorer = functionScorer(context); + if (scorer != null && minScore != null) { + scorer = new MinScoreScorer(this, scorer, minScore); + } + return scorer; } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Explanation subQueryExpl = subQueryWeight.explain(context, doc); - if (!subQueryExpl.isMatch()) { - return subQueryExpl; + Explanation expl = subQueryWeight.explain(context, doc); + if (!expl.isMatch()) { + return expl; } // First: Gather explanations for all filters List filterExplanations = new ArrayList<>(); @@ -218,7 +224,7 @@ public class FiltersFunctionScoreQuery extends Query { filterWeights[i].scorer(context)); if (docSet.get(doc)) { FilterFunction filterFunction = filterFunctions[i]; - Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); + Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, expl); double factor = functionExplanation.getValue(); float sc = CombineFunction.toFloat(factor); Explanation filterExplanation = Explanation.match(sc, "function score, product of:", @@ -226,46 +232,52 @@ public class FiltersFunctionScoreQuery extends Query { filterExplanations.add(filterExplanation); } } - if (filterExplanations.size() == 0) { - return subQueryExpl; + if (filterExplanations.size() > 0) { + FiltersFunctionFactorScorer scorer = functionScorer(context); + int actualDoc = scorer.iterator().advance(doc); + assert (actualDoc == doc); + double score = scorer.computeScore(doc, expl.getValue()); + Explanation factorExplanation = Explanation.match( + CombineFunction.toFloat(score), + "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", + filterExplanations); + expl = combineFunction.explain(expl, factorExplanation, maxBoost); } - - FiltersFunctionFactorScorer scorer = 
(FiltersFunctionFactorScorer)scorer(context); - int actualDoc = scorer.iterator.advance(doc); - assert (actualDoc == doc); - double score = scorer.computeScore(doc, subQueryExpl.getValue()); - Explanation factorExplanation = Explanation.match( - CombineFunction.toFloat(score), - "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", - filterExplanations); - return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost); + if (minScore != null && minScore > expl.getValue()) { + expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); + } + return expl; } } - static class FiltersFunctionFactorScorer extends CustomBoostFactorScorer { + static class FiltersFunctionFactorScorer extends FilterScorer { private final FilterFunction[] filterFunctions; private final ScoreMode scoreMode; private final LeafScoreFunction[] functions; private final Bits[] docSets; + private final CombineFunction scoreCombiner; + private final float maxBoost; private final boolean needsScores; private FiltersFunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions, - float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, Float minScore, boolean needsScores) throws IOException { - super(w, scorer, maxBoost, scoreCombiner, minScore); + float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, boolean needsScores) throws IOException { + super(scorer, w); this.scoreMode = scoreMode; this.filterFunctions = filterFunctions; this.functions = functions; this.docSets = docSets; + this.scoreCombiner = scoreCombiner; + this.maxBoost = maxBoost; this.needsScores = needsScores; } @Override - public float innerScore() throws IOException { - int docId = scorer.docID(); + public float score() throws IOException { + int docId = docID(); // Even if the weight is created with needsScores=false, it might // be costly to call score(), so we explicitly check if scores // are needed - float subQueryScore = needsScores ? scorer.score() : 0f; + float subQueryScore = needsScores ? 
super.score() : 0f; double factor = computeScore(docId, subQueryScore); return scoreCombiner.combine(subQueryScore, factor, maxBoost); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index b94da9d8b70..3cf4f3e48f7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -90,7 +91,7 @@ public class FunctionScoreQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - if (needsScores == false) { + if (needsScores == false && minScore == null) { return subQuery.createWeight(searcher, needsScores); } @@ -128,8 +129,7 @@ public class FunctionScoreQuery extends Query { subQueryWeight.normalize(norm, boost); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { return null; @@ -138,7 +138,16 @@ public class FunctionScoreQuery extends Query { if (function != null) { leafFunction = function.getLeafScoreFunction(context); } - return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, minScore, needsScores); + return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, needsScores); + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + Scorer scorer = functionScorer(context); + if (scorer != null && minScore != null) { + scorer = new MinScoreScorer(this, scorer, minScore); + } + return scorer; } @Override @@ -147,38 +156,47 @@ public class FunctionScoreQuery extends Query { if (!subQueryExpl.isMatch()) { return subQueryExpl; } + Explanation expl; if (function != null) { Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); - return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost); + expl = combineFunction.explain(subQueryExpl, functionExplanation, maxBoost); } else { - return subQueryExpl; + expl = subQueryExpl; } + if (minScore != null && minScore > expl.getValue()) { + expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); + } + return expl; } } - static class FunctionFactorScorer extends CustomBoostFactorScorer { + static class FunctionFactorScorer extends FilterScorer { private final LeafScoreFunction function; private final boolean needsScores; + private final CombineFunction scoreCombiner; + private final float maxBoost; - private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, Float minScore, boolean needsScores) + private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, 
CombineFunction scoreCombiner, boolean needsScores) throws IOException { - super(w, scorer, maxBoost, scoreCombiner, minScore); + super(scorer, w); this.function = function; + this.scoreCombiner = scoreCombiner; + this.maxBoost = maxBoost; this.needsScores = needsScores; } @Override - public float innerScore() throws IOException { + public float score() throws IOException { // Even if the weight is created with needsScores=false, it might // be costly to call score(), so we explicitly check if scores // are needed - float score = needsScores ? scorer.score() : 0f; + float score = needsScores ? super.score() : 0f; if (function == null) { return score; } else { return scoreCombiner.combine(score, - function.score(scorer.docID(), score), maxBoost); + function.score(docID(), score), maxBoost); } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java new file mode 100644 index 00000000000..b4b87bda6d8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.lucene.search.function; + +import java.io.IOException; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.ScoreCachingWrappingScorer; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; + +/** A {@link Scorer} that filters out documents that have a score that is + * lower than a configured constant. */ +final class MinScoreScorer extends Scorer { + + private final Scorer in; + private final float minScore; + + MinScoreScorer(Weight weight, Scorer scorer, float minScore) { + super(weight); + if (scorer instanceof ScoreCachingWrappingScorer == false) { + // when minScore is set, scores might be requested twice: once + // to verify the match, and once by the collector + scorer = new ScoreCachingWrappingScorer(scorer); + } + this.in = scorer; + this.minScore = minScore; + } + + public Scorer getScorer() { + return in; + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public float score() throws IOException { + return in.score(); + } + + @Override + public int freq() throws IOException { + return in.freq(); + } + + @Override + public DocIdSetIterator iterator() { + return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); + } + + @Override + public TwoPhaseIterator twoPhaseIterator() { + final TwoPhaseIterator inTwoPhase = this.in.twoPhaseIterator(); + final DocIdSetIterator approximation = inTwoPhase == null ? 
in.iterator() : inTwoPhase.approximation(); + return new TwoPhaseIterator(approximation) { + + @Override + public boolean matches() throws IOException { + // we need to check the two-phase iterator first + // otherwise calling score() is illegal + if (inTwoPhase != null && inTwoPhase.matches() == false) { + return false; + } + return in.score() >= minScore; + } + + @Override + public float matchCost() { + return 1000f // random constant for the score computation + + (inTwoPhase == null ? 0 : inTwoPhase.matchCost()); + } + }; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java new file mode 100644 index 00000000000..de7a32b2357 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.lucene.search.function; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +public class MinScoreScorerTests extends LuceneTestCase { + + private static DocIdSetIterator iterator(final int... docs) { + return new DocIdSetIterator() { + + int i = -1; + + @Override + public int nextDoc() throws IOException { + if (i + 1 == docs.length) { + return NO_MORE_DOCS; + } else { + return docs[++i]; + } + } + + @Override + public int docID() { + return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i]; + } + + @Override + public long cost() { + return docs.length; + } + + @Override + public int advance(int target) throws IOException { + return slowAdvance(target); + } + }; + } + + private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { + final DocIdSetIterator iterator = twoPhase ? 
DocIdSetIterator.all(maxDoc) : iterator(docs); + return new Scorer(null) { + public DocIdSetIterator iterator() { + if (twoPhase) { + return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); + } else { + return iterator; + } + } + + public TwoPhaseIterator twoPhaseIterator() { + if (twoPhase) { + return new TwoPhaseIterator(iterator) { + + @Override + public boolean matches() throws IOException { + return Arrays.binarySearch(docs, iterator.docID()) >= 0; + } + + @Override + public float matchCost() { + return 10; + } + }; + } else { + return null; + } + } + + @Override + public int docID() { + return iterator.docID(); + } + + @Override + public float score() throws IOException { + final int idx = Arrays.binarySearch(docs, docID()); + return scores[idx]; + } + + @Override + public int freq() throws IOException { + return 1; + } + }; + } + + public void doTestRandom(boolean twoPhase) throws IOException { + final int maxDoc = TestUtil.nextInt(random(), 10, 10000); + final int numDocs = TestUtil.nextInt(random(), 1, maxDoc / 2); + final Set uniqueDocs = new HashSet<>(); + while (uniqueDocs.size() < numDocs) { + uniqueDocs.add(random().nextInt(maxDoc)); + } + final int[] docs = new int[numDocs]; + int i = 0; + for (int doc : uniqueDocs) { + docs[i++] = doc; + } + Arrays.sort(docs); + final float[] scores = new float[numDocs]; + for (i = 0; i < numDocs; ++i) { + scores[i] = random().nextFloat(); + } + Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); + final float minScore = random().nextFloat(); + Scorer minScoreScorer = new MinScoreScorer(null, scorer, minScore); + int doc = -1; + while (doc != DocIdSetIterator.NO_MORE_DOCS) { + final int target; + if (random().nextBoolean()) { + target = doc + 1; + doc = minScoreScorer.iterator().nextDoc(); + } else { + target = doc + TestUtil.nextInt(random(), 1, 10); + doc = minScoreScorer.iterator().advance(target); + } + int idx = Arrays.binarySearch(docs, target); + if (idx < 0) { + idx = -1 - idx; + } + while (idx < docs.length && scores[idx] < minScore) { + idx += 1; + } + if (idx == docs.length) { + assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc); + } else { + assertEquals(docs[idx], doc); + assertEquals(scores[idx], scorer.score(), 0f); + } + } + } + + public void testRegularIterator() throws IOException { + final int iters = atLeast(5); + for (int iter = 0; iter < iters; ++iter) { + doTestRandom(false); + } + } + + public void testTwoPhaseIterator() throws IOException { + final int iters = atLeast(5); + for (int iter = 0; iter < iters; ++iter) { + doTestRandom(true); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreEquivalenceTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreEquivalenceTests.java new file mode 100644 index 00000000000..768a8595d08 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreEquivalenceTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.query.functionscore; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.search.SearchEquivalenceTestBase; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.FilterFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; + +public class FunctionScoreEquivalenceTests extends SearchEquivalenceTestBase { + + public void testMinScoreAllIncluded() throws Exception { + Term term = randomTerm(); + Query query = new TermQuery(term); + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, 0f, null, Float.POSITIVE_INFINITY); + assertSameScores(query, fsq); + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY); + assertSameScores(query, ffsq); + } + + public void testMinScoreAllExcluded() throws Exception { + Term term = randomTerm(); + Query query = new TermQuery(term); + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, Float.POSITIVE_INFINITY, null, Float.POSITIVE_INFINITY); + assertSameScores(new MatchNoDocsQuery(), fsq); + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY, CombineFunction.MULTIPLY); + assertSameScores(new MatchNoDocsQuery(), ffsq); + } + + public void testTwoPhaseMinScore() throws Exception { + Term term = randomTerm(); + Query query = new TermQuery(term); + Float minScore = random().nextFloat(); + + FunctionScoreQuery fsq1 = new FunctionScoreQuery(query, null, minScore, null, Float.POSITIVE_INFINITY); + FunctionScoreQuery fsq2 = new FunctionScoreQuery(new RandomApproximationQuery(query, random()), null, minScore, null, Float.POSITIVE_INFINITY); + assertSameScores(fsq1, fsq2); + + FiltersFunctionScoreQuery ffsq1 = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, minScore, CombineFunction.MULTIPLY); + FiltersFunctionScoreQuery ffsq2 = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, minScore, CombineFunction.MULTIPLY); + assertSameScores(ffsq1, ffsq2); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index f671c97604d..51100360dd4 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -31,6 
+31,9 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; @@ -46,6 +49,8 @@ import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.FilterFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; @@ -559,4 +564,51 @@ public class FunctionScoreTests extends ESTestCase { float score = topDocsWithWeights.scoreDocs[0].score; assertThat(score, equalTo(2.0f)); } + + public void testMinScoreExplain() throws IOException { + Query query = new MatchAllDocsQuery(); + Explanation queryExpl = searcher.explain(query, 0); + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, 0f, null, Float.POSITIVE_INFINITY); + Explanation fsqExpl = searcher.explain(fsq, 0); + assertTrue(fsqExpl.isMatch()); + assertEquals(queryExpl.getValue(), fsqExpl.getValue(), 0f); + assertEquals(queryExpl.getDescription(), fsqExpl.getDescription()); + + fsq = new FunctionScoreQuery(query, null, 10f, null, Float.POSITIVE_INFINITY); + fsqExpl = searcher.explain(fsq, 0); + assertFalse(fsqExpl.isMatch()); + assertEquals("Score value is too low, expected at least 10.0 but got 1.0", fsqExpl.getDescription()); + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY); + Explanation ffsqExpl = searcher.explain(ffsq, 0); + assertTrue(ffsqExpl.isMatch()); + assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f); + assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription()); + + ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 10f, CombineFunction.MULTIPLY); + ffsqExpl = searcher.explain(ffsq, 0); + assertFalse(ffsqExpl.isMatch()); + assertEquals("Score value is too low, expected at least 10.0 but got 1.0", ffsqExpl.getDescription()); + } + + public void testPropagatesApproximations() throws IOException { + Query query = new RandomApproximationQuery(new MatchAllDocsQuery(), random()); + IndexSearcher searcher = newSearcher(reader); + searcher.setQueryCache(null); // otherwise we could get a cached entry that does not have approximations + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, null, null, Float.POSITIVE_INFINITY); + for (boolean needsScores : new boolean[] {true, false}) { + Weight weight = searcher.createWeight(fsq, needsScores); + Scorer scorer = weight.scorer(reader.leaves().get(0)); + assertNotNull(scorer.twoPhaseIterator()); + } + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, null, CombineFunction.MULTIPLY); + for (boolean needsScores 
: new boolean[] {true, false}) {
+            Weight weight = searcher.createWeight(ffsq, needsScores);
+            Scorer scorer = weight.scorer(reader.leaves().get(0));
+            assertNotNull(scorer.twoPhaseIterator());
+        }
+    }
 }

From a0b946ccab19160d3e842b7a40dc20e75d4b7f18 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Wed, 23 Dec 2015 13:13:47 +0100
Subject: [PATCH 236/322] Smoke Tester: Don't install groovy/expression plugins

Those two plugins are modules now and thus don't need to be installed
explicitly when testing.

---
 dev-tools/smoke_test_rc.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py
index 3fa61c4361f..667f57fbc85 100644
--- a/dev-tools/smoke_test_rc.py
+++ b/dev-tools/smoke_test_rc.py
@@ -67,8 +67,6 @@ DEFAULT_PLUGINS = ["analysis-icu",
                    "discovery-ec2",
                    "discovery-gce",
                    "discovery-multicast",
-                   "lang-expression",
-                   "lang-groovy",
                    "lang-javascript",
                    "lang-plan-a",
                    "lang-python",

From 1e2919b03d067036cb2fceeff7b39b9fb278c86e Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Wed, 23 Dec 2015 13:24:47 +0100
Subject: [PATCH 237/322] Smoke Tester: Install mapper-attachments & repository-hdfs plugins

---
 dev-tools/smoke_test_rc.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py
index 667f57fbc85..5a94377407d 100644
--- a/dev-tools/smoke_test_rc.py
+++ b/dev-tools/smoke_test_rc.py
@@ -70,9 +70,11 @@ DEFAULT_PLUGINS = ["analysis-icu",
                    "lang-javascript",
                    "lang-plan-a",
                    "lang-python",
+                   "mapper-attachments",
                    "mapper-murmur3",
                    "mapper-size",
                    "repository-azure",
+                   "repository-hdfs",
                    "repository-s3",
                    "store-smb"]

From d8d86668771d4ee5362872208c13afb26f6979f4 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Wed, 16 Dec 2015 19:41:34 +0100
Subject: [PATCH 238/322] Remove `index_name` back compat.

Since 2.0 we enforce that fields have the same full and index names. So in 3.x
we can remove the ability to have different names on the same field.
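The mechanical pattern repeated across the hunks that follow, shown here for
orientation; `fieldType` stands in for any MappedFieldType reference:

    // Before: MappedFieldType.Names offered two accessors whose values have
    // been forced equal since 2.0, so the distinction carried no information:
    String indexed = fieldType.names().indexName();
    String full = fieldType.names().fullName();

    // After: both call sites collapse onto the single remaining accessor,
    // e.g. in MapperQueryParser below:
    //   query = super.getFieldQuery(currentFieldType.name(), queryText, quoted);
    String name = fieldType.name();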
--- .../classic/MapperQueryParser.java | 6 +- .../analyze/TransportAnalyzeAction.java | 4 +- .../TransportGetFieldMappingsIndexAction.java | 12 +- .../org/elasticsearch/index/IndexService.java | 8 +- .../PerFieldMappingPostingFormatCodec.java | 2 +- .../index/fielddata/IndexFieldData.java | 2 +- .../index/fielddata/IndexFieldDataCache.java | 5 +- .../fielddata/IndexFieldDataService.java | 25 ++-- .../index/fielddata/ShardFieldData.java | 13 +- .../BytesRefFieldComparatorSource.java | 2 +- .../DoubleValuesComparatorSource.java | 2 +- .../FloatValuesComparatorSource.java | 2 +- .../LongValuesComparatorSource.java | 2 +- .../ordinals/GlobalOrdinalsBuilder.java | 6 +- .../GlobalOrdinalsIndexFieldData.java | 10 +- .../InternalGlobalOrdinalsIndexFieldData.java | 4 +- .../AbstractGeoPointDVIndexFieldData.java | 15 +- .../plain/AbstractIndexFieldData.java | 15 +- .../plain/AbstractIndexGeoPointFieldData.java | 5 +- .../plain/AbstractIndexOrdinalsFieldData.java | 7 +- .../plain/BinaryDVIndexFieldData.java | 7 +- .../plain/BytesBinaryDVIndexFieldData.java | 11 +- .../plain/DisabledIndexFieldData.java | 9 +- .../plain/DocValuesIndexFieldData.java | 23 ++-- .../plain/GeoPointArrayIndexFieldData.java | 8 +- .../fielddata/plain/IndexIndexFieldData.java | 6 +- .../plain/PagedBytesIndexFieldData.java | 14 +- .../plain/ParentChildIndexFieldData.java | 11 +- .../plain/SortedNumericDVIndexFieldData.java | 5 +- .../SortedSetDVOrdinalsIndexFieldData.java | 11 +- .../index/fieldvisitor/FieldsVisitor.java | 4 +- .../fieldvisitor/SingleFieldsVisitor.java | 2 +- .../index/mapper/DocumentFieldMappers.java | 16 +-- .../index/mapper/FieldMapper.java | 32 +---- .../index/mapper/FieldTypeLookup.java | 89 ++---------- .../index/mapper/MappedFieldType.java | 129 +++++------------- .../index/mapper/MapperService.java | 34 +---- .../index/mapper/core/BinaryFieldMapper.java | 8 +- .../index/mapper/core/BooleanFieldMapper.java | 4 +- .../index/mapper/core/ByteFieldMapper.java | 10 +- .../mapper/core/CompletionFieldMapper.java | 16 +-- .../index/mapper/core/DateFieldMapper.java | 14 +- .../index/mapper/core/DoubleFieldMapper.java | 10 +- .../index/mapper/core/FloatFieldMapper.java | 10 +- .../index/mapper/core/IntegerFieldMapper.java | 10 +- .../index/mapper/core/LongFieldMapper.java | 10 +- .../index/mapper/core/NumberFieldMapper.java | 6 +- .../index/mapper/core/ShortFieldMapper.java | 10 +- .../index/mapper/core/StringFieldMapper.java | 10 +- .../mapper/core/TokenCountFieldMapper.java | 2 +- .../index/mapper/core/TypeParsers.java | 5 +- .../mapper/geo/BaseGeoPointFieldMapper.java | 10 +- .../index/mapper/geo/GeoPointFieldMapper.java | 2 +- .../mapper/geo/GeoPointFieldMapperLegacy.java | 10 +- .../index/mapper/geo/GeoShapeFieldMapper.java | 24 ++-- .../index/mapper/internal/AllFieldMapper.java | 6 +- .../internal/FieldNamesFieldMapper.java | 6 +- .../index/mapper/internal/IdFieldMapper.java | 6 +- .../mapper/internal/IndexFieldMapper.java | 6 +- .../mapper/internal/ParentFieldMapper.java | 16 +-- .../mapper/internal/RoutingFieldMapper.java | 8 +- .../mapper/internal/SourceFieldMapper.java | 4 +- .../index/mapper/internal/TTLFieldMapper.java | 2 +- .../mapper/internal/TimestampFieldMapper.java | 6 +- .../mapper/internal/TypeFieldMapper.java | 8 +- .../index/mapper/internal/UidFieldMapper.java | 4 +- .../mapper/internal/VersionFieldMapper.java | 2 +- .../index/mapper/ip/IpFieldMapper.java | 6 +- .../percolator/QueriesLoaderCollector.java | 2 +- .../index/query/CommonTermsQueryBuilder.java | 2 +- 
.../index/query/ExistsQueryBuilder.java | 2 +- .../query/FieldMaskingSpanQueryBuilder.java | 2 +- .../query/GeoBoundingBoxQueryBuilder.java | 2 +- .../index/query/GeoDistanceQueryBuilder.java | 2 +- .../query/GeoDistanceRangeQueryBuilder.java | 2 +- .../index/query/GeoPolygonQueryBuilder.java | 2 +- .../index/query/MoreLikeThisQueryBuilder.java | 2 +- .../index/query/QueryShardContext.java | 4 +- .../index/query/SimpleQueryStringBuilder.java | 2 +- .../index/query/SpanTermQueryBuilder.java | 2 +- .../index/query/TermsQueryBuilder.java | 2 +- .../index/query/WildcardQueryBuilder.java | 2 +- .../functionscore/DecayFunctionBuilder.java | 4 +- .../FieldValueFactorFunctionBuilder.java | 2 +- .../random/RandomScoreFunctionBuilder.java | 2 +- .../index/search/MatchQuery.java | 2 +- .../index/search/MultiMatchQuery.java | 2 +- .../search/geo/GeoDistanceRangeQuery.java | 8 +- .../index/search/geo/GeoPolygonQuery.java | 8 +- .../geo/InMemoryGeoBoundingBoxQuery.java | 4 +- .../geo/IndexedGeoBoundingBoxQuery.java | 2 +- .../index/similarity/SimilarityService.java | 2 +- .../index/termvectors/TermVectorsService.java | 6 +- .../cache/IndicesFieldDataCache.java | 19 ++- .../cache/IndicesFieldDataCacheListener.java | 5 +- .../indices/ttl/IndicesTTLService.java | 2 +- .../percolator/PercolateContext.java | 7 +- .../percolator/PercolatorService.java | 2 +- .../percolator/QueryCollector.java | 2 +- .../elasticsearch/search/SearchService.java | 18 +-- .../bucket/children/ChildrenParser.java | 2 +- .../support/ValuesSourceParser.java | 2 +- .../FieldDataFieldsFetchSubPhase.java | 2 +- .../highlight/FastVectorHighlighter.java | 6 +- .../search/highlight/HighlightUtils.java | 6 +- .../search/highlight/PlainHighlighter.java | 6 +- .../search/highlight/PostingsHighlighter.java | 2 +- .../FragmentBuilderHelper.java | 2 +- .../SourceScoreOrderFragmentsBuilder.java | 4 +- .../SourceSimpleFragmentsBuilder.java | 4 +- .../search/internal/DefaultSearchContext.java | 9 +- .../internal/FilteredSearchContext.java | 5 - .../search/internal/SearchContext.java | 4 +- .../search/lookup/FieldLookup.java | 4 +- .../search/lookup/LeafDocLookup.java | 4 +- .../search/lookup/LeafFieldsLookup.java | 6 +- .../search/sort/SortParseElement.java | 2 +- .../completion/CompletionSuggestParser.java | 4 +- .../completion/CompletionSuggester.java | 2 +- .../suggest/phrase/PhraseSuggestParser.java | 4 +- .../index/fielddata/FieldDataCacheTests.java | 6 +- .../fielddata/IndexFieldDataServiceTests.java | 11 +- .../NoOrdinalsStringFieldDataTests.java | 5 +- .../mapper/DocumentFieldMapperTests.java | 4 +- .../index/mapper/FieldTypeLookupTests.java | 90 +++--------- .../index/mapper/FieldTypeTestCase.java | 4 +- .../CompletionFieldMapperTests.java | 12 +- .../ExternalMetadataMapper.java | 2 +- .../internal/FieldNamesFieldMapperTests.java | 2 +- .../internal/ParentFieldMapperTests.java | 12 +- .../mapper/simple/SimpleMapperTests.java | 14 +- .../FieldMaskingSpanQueryBuilderTests.java | 2 +- .../index/query/RangeQueryBuilderTests.java | 2 +- .../functionscore/FunctionScoreTests.java | 9 +- .../index/shard/IndexShardTests.java | 2 +- .../CategoryContextMappingTests.java | 8 +- .../completion/GeoContextMappingTests.java | 8 +- .../expression/CountMethodValueSource.java | 2 +- .../expression/DateMethodValueSource.java | 2 +- .../ExpressionScriptEngineService.java | 2 +- .../expression/FieldDataValueSource.java | 2 +- .../messy/tests/GeoShapeIntegrationTests.java | 4 +- .../attachments/EncryptedDocMapperTests.java | 48 +++---- 
...anguageDetectionAttachmentMapperTests.java | 4 +- .../attachments/MetadataMapperTests.java | 20 +-- .../SimpleAttachmentMapperTests.java | 40 ++---- .../mapper/attachments/StandaloneRunner.java | 2 +- .../mapper/attachments/VariousDocTests.java | 6 +- .../index/mapper/size/SizeFieldMapper.java | 2 +- .../elasticsearch/test/TestSearchContext.java | 13 +- 150 files changed, 533 insertions(+), 823 deletions(-) diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 744950ed4f6..89eedce09d8 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -226,7 +226,7 @@ public class MapperQueryParser extends QueryParser { } } if (query == null) { - query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted); + query = super.getFieldQuery(currentFieldType.name(), queryText, quoted); } return query; } @@ -466,7 +466,7 @@ public class MapperQueryParser extends QueryParser { query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context); } if (query == null) { - query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr); + query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr); } return query; } @@ -592,7 +592,7 @@ public class MapperQueryParser extends QueryParser { if (!settings.forceAnalyzer()) { setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } - indexedNameField = currentFieldType.names().indexName(); + indexedNameField = currentFieldType.name(); return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); } return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index f9f3321e511..0541ac31505 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -126,13 +126,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction remainingFieldMappers = newLinkedList(allFieldMappers); for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) { - addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { + addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) { - addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { + addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } @@ -214,7 +214,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc 
builder.startObject(); fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS); builder.endObject(); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes())); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), builder.bytes())); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 21d688edd4c..100b8b7ae81 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -459,21 +459,21 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } @Override - public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { if (shardId != null) { final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { - shard.fieldData().onCache(shardId, fieldNames, fieldDataType, ramUsage); + shard.fieldData().onCache(shardId, fieldName, fieldDataType, ramUsage); } } } @Override - public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (shardId != null) { final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { - shard.fieldData().onRemoval(shardId, fieldNames, fieldDataType, wasEvicted, sizeInBytes); + shard.fieldData().onRemoval(shardId, fieldName, fieldDataType, wasEvicted, sizeInBytes); } } } diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 2c23f947475..7663a322be6 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -54,7 +54,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene54Codec { @Override public PostingsFormat getPostingsFormatForField(String field) { - final MappedFieldType indexName = mapperService.indexName(field); + final MappedFieldType indexName = mapperService.fullName(field); if (indexName == null) { logger.warn("no index mapper found for field: [{}] returning default postings format", field); } else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index fb01a374d2f..ffa23bf56e4 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -80,7 +80,7 @@ public interface IndexFieldData extends IndexCompone /** * The field name. */ - MappedFieldType.Names getFieldNames(); + String getFieldName(); /** * The field data type. 
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java index f14a0a6314b..7640a9be200 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.Accountable; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; /** @@ -49,12 +48,12 @@ public interface IndexFieldDataCache { /** * Called after the fielddata is loaded during the cache phase */ - void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage); + void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage); /** * Called after the fielddata is unloaded */ - void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); + void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); } class None implements IndexFieldDataCache { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 39cd710c4ad..8ac0bda2f0b 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; @@ -61,7 +60,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo public static final String FIELDDATA_CACHE_VALUE_NODE = "node"; private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> { - throw new IllegalStateException("Can't load fielddata on [" + fieldType.names().fullName() + throw new IllegalStateException("Can't load fielddata on [" + fieldType.name() + "] of index [" + indexProperties.getIndex().getName() + "] because fielddata is unsupported on fields of type [" + fieldType.fieldDataType().getType() + "]. 
Use doc values instead."); }; @@ -148,11 +147,11 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo private final MapperService mapperService; private static final IndexFieldDataCache.Listener DEFAULT_NOOP_LISTENER = new IndexFieldDataCache.Listener() { @Override - public void onCache(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { } @Override - public void onRemoval(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { } }; private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER; @@ -195,22 +194,22 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo @SuppressWarnings("unchecked") public > IFD getForField(MappedFieldType fieldType) { - final Names fieldNames = fieldType.names(); + final String fieldName = fieldType.name(); final FieldDataType type = fieldType.fieldDataType(); if (type == null) { - throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]"); + throw new IllegalArgumentException("found no fielddata type for field [" + fieldName + "]"); } final boolean docValues = fieldType.hasDocValues(); IndexFieldData.Builder builder = null; String format = type.getFormat(indexSettings.getSettings()); if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) { - logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format"); + logger.warn("field [" + fieldName + "] has no doc values, will use default field data format"); format = null; } if (format != null) { builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format)); if (builder == null) { - logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default"); + logger.warn("failed to find format [" + format + "] for field [" + fieldName + "], will use default"); } } if (builder == null && docValues) { @@ -220,24 +219,24 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo builder = buildersByType.get(type.getType()); } if (builder == null) { - throw new IllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType()); + throw new IllegalArgumentException("failed to find field data builder for field " + fieldName + ", and type " + type.getType()); } IndexFieldDataCache cache; synchronized (this) { - cache = fieldDataCaches.get(fieldNames.indexName()); + cache = fieldDataCaches.get(fieldName); if (cache == null) { // we default to node level cache, which in turn defaults to be unbounded // this means changing the node level settings is simple, just set the bounds there String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE)); if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) { - cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldNames, type); + cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type); } else if ("none".equals(cacheType)){ cache = new IndexFieldDataCache.None(); } else { - throw new 
IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]"); + throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]"); } - fieldDataCaches.put(fieldNames.indexName(), cache); + fieldDataCaches.put(fieldName, cache); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java index e646364ef13..bb31df75348 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; import java.util.Map; @@ -52,16 +51,15 @@ public class ShardFieldData implements IndexFieldDataCache.Listener { } @Override - public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { totalMetric.inc(ramUsage.ramBytesUsed()); - String keyFieldName = fieldNames.indexName(); - CounterMetric total = perFieldTotals.get(keyFieldName); + CounterMetric total = perFieldTotals.get(fieldName); if (total != null) { total.inc(ramUsage.ramBytesUsed()); } else { total = new CounterMetric(); total.inc(ramUsage.ramBytesUsed()); - CounterMetric prev = perFieldTotals.putIfAbsent(keyFieldName, total); + CounterMetric prev = perFieldTotals.putIfAbsent(fieldName, total); if (prev != null) { prev.inc(ramUsage.ramBytesUsed()); } @@ -69,15 +67,14 @@ public class ShardFieldData implements IndexFieldDataCache.Listener { } @Override - public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (wasEvicted) { evictionsMetric.inc(); } if (sizeInBytes != -1) { totalMetric.dec(sizeInBytes); - String keyFieldName = fieldNames.indexName(); - CounterMetric total = perFieldTotals.get(keyFieldName); + CounterMetric total = perFieldTotals.get(fieldName); if (total != null) { total.dec(sizeInBytes); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 2ae3f950409..51f8f2b42bd 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -80,7 +80,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { - assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName()); + assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ 
reversed; final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 5391345e793..4684399a23d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -65,7 +65,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { - assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName()); + assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = (Double) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 15628513e80..ba9b031cede 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -57,7 +57,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { - assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName()); + assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float dMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index fe84f8e8840..b2fd25e5445 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -56,7 +56,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { - assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName()); + assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final Long dMissingValue = (Long) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java index 38b36a640cf..dc5041d24ef 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java @@ -64,12 +64,12 @@ public enum GlobalOrdinalsBuilder { if (logger.isDebugEnabled()) { logger.debug( "Global-ordinals[{}][{}] took {} ms", - indexFieldData.getFieldNames().fullName(), + indexFieldData.getFieldName(), ordinalMap.getValueCount(), TimeValue.nsecToMSec(System.nanoTime() - startTimeNS) ); } - return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(), + return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(), indexFieldData.getFieldDataType(), atomicFD, ordinalMap, memorySizeInBytes ); } @@ -103,7 +103,7 @@ public enum GlobalOrdinalsBuilder { subs[i] = atomicFD[i].getOrdinalsValues(); } final OrdinalMap ordinalMap = OrdinalMap.build(null, subs, PackedInts.DEFAULT); - return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(), + return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(), indexFieldData.getFieldDataType(), atomicFD, ordinalMap, 0 ); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java index 4a8bd78bb4e..5e1a2b57401 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java @@ -40,13 +40,13 @@ import java.util.Collections; */ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable { - private final MappedFieldType.Names fieldNames; + private final String fieldName; private final FieldDataType fieldDataType; private final long memorySizeInBytes; - protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) { + protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, long memorySizeInBytes) { super(indexSettings); - this.fieldNames = fieldNames; + this.fieldName = fieldName; this.fieldDataType = fieldDataType; this.memorySizeInBytes = memorySizeInBytes; } @@ -67,8 +67,8 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen } @Override - public MappedFieldType.Names getFieldNames() { - return fieldNames; + public String getFieldName() { + return fieldName; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java index fc1b6db9758..297c8b0f30c 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java @@ -37,8 +37,8 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel private final Atomic[] atomicReaders; - InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) { - super(indexSettings, fieldNames, fieldDataType, memorySizeInBytes); + 
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) { + super(indexSettings, fieldName, fieldDataType, memorySizeInBytes); this.atomicReaders = new Atomic[segmentAfd.length]; for (int i = 0; i < segmentAfd.length; i++) { atomicReaders[i] = new Atomic(segmentAfd[i], ordinalMap, i); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java index 237f147ebf0..3d4b6536b6c 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.N import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -41,8 +40,8 @@ import java.io.IOException; public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData { - AbstractGeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) { - super(index, fieldNames, fieldDataType); + AbstractGeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) { + super(index, fieldName, fieldDataType); } @Override @@ -56,8 +55,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData { final boolean indexCreatedBefore2x; - public GeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) { - super(index, fieldNames, fieldDataType); + public GeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) { + super(index, fieldName, fieldDataType); this.indexCreatedBefore2x = indexCreatedBefore2x; } @@ -65,9 +64,9 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie public AtomicGeoPointFieldData load(LeafReaderContext context) { try { if (indexCreatedBefore2x) { - return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName())); + return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName)); } - return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldNames.indexName())); + return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName)); } catch (IOException e) { throw new IllegalStateException("Cannot load doc values", e); } @@ -84,7 +83,7 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.names(), 
fieldType.fieldDataType(), + return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), fieldType.fieldDataType(), indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java index 23baeede1cb..151ee92058d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.RamAccountingTermsEnum; -import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -39,20 +38,20 @@ import java.io.IOException; */ public abstract class AbstractIndexFieldData extends AbstractIndexComponent implements IndexFieldData { - private final MappedFieldType.Names fieldNames; + private final String fieldName; protected final FieldDataType fieldDataType; protected final IndexFieldDataCache cache; - public AbstractIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { + public AbstractIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) { super(indexSettings); - this.fieldNames = fieldNames; + this.fieldName = fieldName; this.fieldDataType = fieldDataType; this.cache = cache; } @Override - public MappedFieldType.Names getFieldNames() { - return this.fieldNames; + public String getFieldName() { + return this.fieldName; } @Override @@ -62,12 +61,12 @@ public abstract class AbstractIndexFieldData extends @Override public void clear() { - cache.clear(fieldNames.indexName()); + cache.clear(fieldName); } @Override public FD load(LeafReaderContext context) { - if (context.reader().getFieldInfos().fieldInfo(fieldNames.indexName()) == null) { + if (context.reader().getFieldInfos().fieldInfo(fieldName) == null) { // Some leaf readers may be wrapped and report different set of fields and use the same cache key. // If a field can't be found then it doesn't mean it isn't there, // so if a field doesn't exist then we don't cache it and just return an empty field data instance. 
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index e4a043852cb..a8114c41f9b 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; @@ -92,8 +91,8 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData groups = fieldDataType.getSettings().getGroups("filter"); frequency = groups.get("frequency"); regex = groups.get("regex"); @@ -74,7 +73,7 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD } boolean fieldFound = false; for (LeafReaderContext context : indexReader.leaves()) { - if (context.reader().getFieldInfos().fieldInfo(getFieldNames().indexName()) != null) { + if (context.reader().getFieldInfos().fieldInfo(getFieldName()) != null) { fieldFound = true; break; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java index 2e03b74a41f..c2a50942566 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java @@ -25,18 +25,17 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { - public BinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) { - super(index, fieldNames, fieldDataType); + public BinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) { + super(index, fieldName, fieldDataType); } @Override public BinaryDVAtomicFieldData load(LeafReaderContext context) { - return new BinaryDVAtomicFieldData(context.reader(), fieldNames.indexName()); + return new BinaryDVAtomicFieldData(context.reader(), fieldName); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java index 28fae2c6385..988ecd61d65 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import 
org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -38,8 +37,8 @@ import java.io.IOException; public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { - public BytesBinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) { - super(index, fieldNames, fieldDataType); + public BytesBinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) { + super(index, fieldName, fieldDataType); } @Override @@ -50,7 +49,7 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme @Override public BytesBinaryDVAtomicFieldData load(LeafReaderContext context) { try { - return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName())); + return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName)); } catch (IOException e) { throw new IllegalStateException("Cannot load doc values", e); } @@ -67,8 +66,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - final Names fieldNames = fieldType.names(); - return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType()); + final String fieldName = fieldType.name(); + return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType()); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java index 859d720d072..86daaf1a252 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java @@ -27,7 +27,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -43,12 +42,12 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - return new DisabledIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache); + return new DisabledIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache); } } - public DisabledIndexFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { - super(indexSettings, fieldNames, fieldDataType, cache); + public DisabledIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType 
fieldDataType, IndexFieldDataCache cache) { + super(indexSettings, fieldName, fieldDataType, cache); } @Override @@ -67,7 +66,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - final Names fieldNames = fieldType.names(); + final String fieldName = fieldType.name(); final Settings fdSettings = fieldType.fieldDataType().getSettings(); final Map filter = fdSettings.getGroups("filter"); if (filter != null && !filter.isEmpty()) { - throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]"); + throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldName + "]"); } - if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) { + if (BINARY_INDEX_FIELD_NAMES.contains(fieldName)) { assert numericType == null; - return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType()); + return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType()); } else if (numericType != null) { - return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldNames, numericType, fieldType.fieldDataType()); + return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldName, numericType, fieldType.fieldDataType()); } else { - return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldNames, breakerService, fieldType.fieldDataType()); + return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldName, breakerService, fieldType.fieldDataType()); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java index a0c0a55be71..495cc023e90 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java @@ -54,17 +54,17 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData @Override public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new GeoPointArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, + return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache, breakerService, fieldType.fieldDataType().getSettings() .getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_2_2_0) || indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)); } } - public GeoPointArrayIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, + public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService, final boolean indexCreatedBefore22) { - super(indexSettings, fieldNames, fieldDataType, cache); + super(indexSettings, fieldName, fieldDataType, cache); this.breakerService = breakerService; this.indexCreatedBefore22 = indexCreatedBefore22; } @@ -73,7 +73,7 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData public 
AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception { LeafReader reader = context.reader(); - Terms terms = reader.terms(getFieldNames().indexName()); + Terms terms = reader.terms(getFieldName()); AtomicGeoPointFieldData data = null; // TODO: Use an actual estimator to estimate before loading. NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA)); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index 471bea73570..f2c4fa826f1 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -46,7 +46,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new IndexIndexFieldData(indexSettings, fieldType.names()); + return new IndexIndexFieldData(indexSettings, fieldType.name()); } } @@ -100,8 +100,8 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { private final AtomicOrdinalsFieldData atomicFieldData; - private IndexIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names names) { - super(indexSettings, names, new FieldDataType("string"), null, null); + private IndexIndexFieldData(IndexSettings indexSettings, String name) { + super(indexSettings, name, new FieldDataType("string"), null, null); atomicFieldData = new IndexAtomicFieldData(index().name()); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index cbf865a202d..ce4f5c55eac 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -57,13 +57,13 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PagedBytesIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService); + return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache, breakerService); } } - public PagedBytesIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, + public PagedBytesIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { - super(indexSettings, fieldNames, fieldDataType, cache, breakerService); + super(indexSettings, fieldName, fieldDataType, cache, breakerService); } @Override @@ -71,8 +71,8 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { LeafReader reader = context.reader(); AtomicOrdinalsFieldData data = null; - PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldNames().fullName()); - Terms terms = 
reader.terms(getFieldNames().indexName()); + PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldName()); + Terms terms = reader.terms(getFieldName()); if (terms == null) { data = AbstractAtomicOrdinalsFieldData.empty(); estimator.afterLoad(null, data.ramBytesUsed()); @@ -167,10 +167,10 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { public long estimateStringFieldData() { try { LeafReader reader = context.reader(); - Terms terms = reader.terms(getFieldNames().indexName()); + Terms terms = reader.terms(getFieldName()); Fields fields = reader.fields(); - final Terms fieldTerms = fields.terms(getFieldNames().indexName()); + final Terms fieldTerms = fields.terms(getFieldName()); if (fieldTerms instanceof FieldReader) { final Stats stats = ((FieldReader) fieldTerms).getStats(); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index eba523e0e2e..14d0375ba57 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -75,10 +74,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData parentTypes; private final CircuitBreakerService breakerService; - public ParentChildIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, + public ParentChildIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService, CircuitBreakerService breakerService) { - super(indexSettings, fieldNames, fieldDataType, cache); + super(indexSettings, fieldName, fieldDataType, cache); this.breakerService = breakerService; Set parentTypes = new HashSet<>(); for (DocumentMapper mapper : mapperService.docMappers(false)) { @@ -147,7 +146,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData> entry : fields().entrySet()) { - MappedFieldType fieldType = mapperService.indexName(entry.getKey()); + MappedFieldType fieldType = mapperService.fullName(entry.getKey()); if (fieldType == null) { continue; } @@ -112,7 +112,7 @@ public class FieldsVisitor extends StoredFieldVisitor { if (fieldMapper == null) { // it's possible index name doesn't match field name (legacy feature) for (FieldMapper mapper : documentMapper.mappers()) { - if (mapper.fieldType().names().indexName().equals(indexName)) { + if (mapper.fieldType().name().equals(indexName)) { fieldMapper = mapper; break; } diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index d628ee4e961..a9880d59f65 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ 
b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -69,7 +69,7 @@ public class SingleFieldsVisitor extends FieldsVisitor { if (fieldsValues == null) { return; } - List fieldValues = fieldsValues.get(fieldType.names().indexName()); + List fieldValues = fieldsValues.get(fieldType.name()); if (fieldValues == null) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index 90da570bbe6..57f2ff40530 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -58,9 +58,9 @@ public final class DocumentFieldMappers implements Iterable { for (FieldMapper mapper : mappers) { fieldMappers.put(mapper.name(), mapper); MappedFieldType fieldType = mapper.fieldType(); - put(indexAnalyzers, fieldType.names().indexName(), fieldType.indexAnalyzer(), defaultIndex); - put(searchAnalyzers, fieldType.names().indexName(), fieldType.searchAnalyzer(), defaultSearch); - put(searchQuoteAnalyzers, fieldType.names().indexName(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); + put(indexAnalyzers, fieldType.name(), fieldType.indexAnalyzer(), defaultIndex); + put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch); + put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); } this.fieldMappers = Collections.unmodifiableMap(fieldMappers); this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers); @@ -76,10 +76,10 @@ public final class DocumentFieldMappers implements Iterable { public Collection simpleMatchToFullName(String pattern) { Set fields = new HashSet<>(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { - fields.add(fieldMapper.fieldType().names().fullName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { - fields.add(fieldMapper.fieldType().names().fullName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { + fields.add(fieldMapper.fieldType().name()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { + fields.add(fieldMapper.fieldType().name()); } } return fields; @@ -91,7 +91,7 @@ public final class DocumentFieldMappers implements Iterable { return fieldMapper; } for (FieldMapper otherFieldMapper : this) { - if (otherFieldMapper.fieldType().names().indexName().equals(name)) { + if (otherFieldMapper.fieldType().name().equals(name)) { return otherFieldMapper; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9c77a416bf1..3ab0ec86303 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -217,31 +217,12 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return builder; } - protected MappedFieldType.Names buildNames(BuilderContext context) { - return new MappedFieldType.Names(buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); - } - - protected String buildIndexName(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - return buildFullName(context); - } - String actualIndexName = indexName == null ? 
name : indexName; - return context.path().pathAsText(actualIndexName); - } - - protected String buildIndexNameClean(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - return buildFullName(context); - } - return indexName == null ? name : indexName; - } - protected String buildFullName(BuilderContext context) { return context.path().pathAsText(name); } protected void setupFieldType(BuilderContext context) { - fieldType.setNames(buildNames(context)); + fieldType.setName(buildFullName(context)); if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); @@ -288,7 +269,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { @Override public String name() { - return fieldType().names().fullName(); + return fieldType().name(); } public MappedFieldType fieldType() { @@ -318,7 +299,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); } multiFields.parse(this, context); return null; @@ -367,7 +348,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { if (mergeWith instanceof FieldMapper) { mergedType = ((FieldMapper) mergeWith).contentType(); } - throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); + throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); } FieldMapper fieldMergeWith = (FieldMapper) mergeWith; multiFields = multiFields.merge(fieldMergeWith.multiFields); @@ -379,7 +360,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { @Override public FieldMapper updateFieldType(Map fullNameToFieldType) { - final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.names().fullName()); + final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name()); if (newFieldType == null) { throw new IllegalStateException(); } @@ -404,9 +385,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { builder.field("type", contentType()); - if (indexCreatedBefore2x && (includeDefaults || !simpleName().equals(fieldType().names().originalIndexName()))) { - builder.field("index_name", fieldType().names().originalIndexName()); - } if (includeDefaults || fieldType().boost() != 1.0f) { builder.field("boost", fieldType().boost()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index e06b4e799ed..5e9378e2f55 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -42,29 +42,17 @@ class FieldTypeLookup implements Iterable { /** Full field name to types containing a mapping for this full name. 
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
index e06b4e799ed..5e9378e2f55 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
@@ -42,29 +42,17 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
 
     /** Full field name to types containing a mapping for this full name. */
     final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
 
-    /** Index field name to field type */
-    final CopyOnWriteHashMap<String, MappedFieldType> indexNameToFieldType;
-
-    /** Index field name to types containing a mapping for this index name. */
-    final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
-
     /** Create a new empty instance. */
     public FieldTypeLookup() {
         fullNameToFieldType = new CopyOnWriteHashMap<>();
         fullNameToTypes = new CopyOnWriteHashMap<>();
-        indexNameToFieldType = new CopyOnWriteHashMap<>();
-        indexNameToTypes = new CopyOnWriteHashMap<>();
     }
 
     private FieldTypeLookup(
             CopyOnWriteHashMap<String, MappedFieldType> fullName,
-            CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
-            CopyOnWriteHashMap<String, MappedFieldType> indexName,
-            CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
+            CopyOnWriteHashMap<String, Set<String>> fullNameToTypes) {
         this.fullNameToFieldType = fullName;
         this.fullNameToTypes = fullNameToTypes;
-        this.indexNameToFieldType = indexName;
-        this.indexNameToTypes = indexNameToTypes;
     }
 
     private static CopyOnWriteHashMap<String, Set<String>> addType(CopyOnWriteHashMap<String, Set<String>> map, String key, String type) {
@@ -97,31 +85,21 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
 
         CopyOnWriteHashMap<String, MappedFieldType> fullName = this.fullNameToFieldType;
         CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
-        CopyOnWriteHashMap<String, MappedFieldType> indexName = this.indexNameToFieldType;
-        CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;
 
         for (FieldMapper fieldMapper : fieldMappers) {
             MappedFieldType fieldType = fieldMapper.fieldType();
-            MappedFieldType fullNameFieldType = fullName.get(fieldType.names().fullName());
-            MappedFieldType indexNameFieldType = indexName.get(fieldType.names().indexName());
-
-            if (fullNameFieldType != null && indexNameFieldType != null && fullNameFieldType != indexNameFieldType) {
-                // this new field bridges between two existing field names (a full and index name), which we cannot support
-                throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
-            }
+            MappedFieldType fullNameFieldType = fullName.get(fieldType.name());
 
             // is the update even legal?
             checkCompatibility(type, fieldMapper, updateAllTypes);
 
-            if (fieldType != fullNameFieldType || fieldType != indexNameFieldType) {
-                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldType());
-                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldType());
+            if (fieldType != fullNameFieldType) {
+                fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType());
             }
 
-            fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
-            indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
+            fullNameToTypes = addType(fullNameToTypes, fieldType.name(), type);
         }
 
-        return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes);
+        return new FieldTypeLookup(fullName, fullNameToTypes);
     }
 
     private static boolean beStrict(String type, Set<String> types, boolean updateAllTypes) {
@@ -142,26 +120,14 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
      * If updateAllTypes is true, only basic compatibility is checked.
      */
     private void checkCompatibility(String type, FieldMapper fieldMapper, boolean updateAllTypes) {
-        MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
+        MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().name());
         if (fieldType != null) {
             List<String> conflicts = new ArrayList<>();
-            final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
+            final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().name());
             boolean strict = beStrict(type, types, updateAllTypes);
             fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
             if (conflicts.isEmpty() == false) {
-                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
-            }
-        }
-
-        // field type for the index name must be compatible too
-        fieldType = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
-        if (fieldType != null) {
-            List<String> conflicts = new ArrayList<>();
-            final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
-            boolean strict = beStrict(type, types, updateAllTypes);
-            fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-            if (conflicts.isEmpty() == false) {
-                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
+                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().name() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
             }
         }
     }
@@ -180,45 +146,16 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         return types;
     }
 
-    /** Returns the field type for the given index name */
-    public MappedFieldType getByIndexName(String field) {
-        return indexNameToFieldType.get(field);
-    }
-
-    /** Get the set of types that have a mapping for the given field. */
-    public Set<String> getTypesByIndexName(String field) {
-        Set<String> types = indexNameToTypes.get(field);
-        if (types == null) {
-            types = Collections.emptySet();
-        }
-        return types;
-    }
-
-    /**
-     * Returns a list of the index names of a simple match regex like pattern against full name and index name.
-     */
-    public Collection<String> simpleMatchToIndexNames(String pattern) {
-        Set<String> fields = new HashSet<>();
-        for (MappedFieldType fieldType : this) {
-            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
-                fields.add(fieldType.names().indexName());
-            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
-                fields.add(fieldType.names().indexName());
-            }
-        }
-        return fields;
-    }
-
     /**
      * Returns a list of the full names matched by a simple regex-like pattern.
     */
     public Collection<String> simpleMatchToFullName(String pattern) {
         Set<String> fields = new HashSet<>();
         for (MappedFieldType fieldType : this) {
-            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
-                fields.add(fieldType.names().fullName());
-            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
-                fields.add(fieldType.names().fullName());
+            if (Regex.simpleMatch(pattern, fieldType.name())) {
+                fields.add(fieldType.name());
+            } else if (Regex.simpleMatch(pattern, fieldType.name())) {
+                fields.add(fieldType.name());
             }
         }
         return fields;
     }
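The copyAndAddAll rewrite above keeps the lookup immutable: an update only swaps in a new map when a field type actually changed, so readers holding the old lookup never observe a half-applied mapping update. A rough JDK-only sketch of that copy-on-write idiom (LookupSketch is a hypothetical stand-in; the real class uses a persistent CopyOnWriteHashMap rather than copying a HashMap wholesale):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Sketch of a copy-on-write lookup keyed by full field name.
    public final class LookupSketch {
        private final Map<String, String> fullNameToType;

        public LookupSketch() {
            this.fullNameToType = Collections.emptyMap();
        }

        private LookupSketch(Map<String, String> fullNameToType) {
            this.fullNameToType = fullNameToType;
        }

        // Mirrors the "fieldType != fullNameFieldType" check above: reuse the
        // existing instance when nothing changed, otherwise publish a fresh copy.
        public LookupSketch copyAndPut(String fullName, String type) {
            if (type.equals(fullNameToType.get(fullName))) {
                return this;
            }
            Map<String, String> copy = new HashMap<>(fullNameToType);
            copy.put(fullName, type);
            return new LookupSketch(Collections.unmodifiableMap(copy));
        }

        public String get(String fullName) {
            return fullNameToType.get(fullName);
        }
    }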
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
index c1383817c79..5f8049b55fb 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
@@ -53,68 +53,6 @@ import java.util.Objects;
  */
 public abstract class MappedFieldType extends FieldType {
 
-    public static class Names {
-
-        private final String indexName;
-
-        private final String originalIndexName;
-
-        private final String fullName;
-
-        public Names(String name) {
-            this(name, name, name);
-        }
-
-        public Names(String indexName, String originalIndexName, String fullName) {
-            this.indexName = indexName;
-            this.originalIndexName = originalIndexName;
-            this.fullName = fullName;
-        }
-
-        /**
-         * The indexed name of the field. This is the name under which we will
-         * store it in the index.
-         */
-        public String indexName() {
-            return indexName;
-        }
-
-        /**
-         * The original index name, before any "path" modifications performed on it.
-         */
-        public String originalIndexName() {
-            return originalIndexName;
-        }
-
-        /**
-         * The full name, including dot path.
-         */
-        public String fullName() {
-            return fullName;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (o == null || getClass() != o.getClass()) return false;
-
-            Names names = (Names) o;
-
-            if (!fullName.equals(names.fullName)) return false;
-            if (!indexName.equals(names.indexName)) return false;
-            if (!originalIndexName.equals(names.originalIndexName)) return false;
-
-            return true;
-        }
-
-        @Override
-        public int hashCode() {
-            int result = indexName.hashCode();
-            result = 31 * result + originalIndexName.hashCode();
-            result = 31 * result + fullName.hashCode();
-            return result;
-        }
-    }
-
     public enum Loading {
         LAZY {
             @Override
@@ -155,7 +93,7 @@ public abstract class MappedFieldType extends FieldType {
         }
     }
 
-    private Names names;
+    private String name;
     private float boost;
     // TODO: remove this docvalues flag and use docValuesType
     private boolean docValues;
@@ -170,7 +108,7 @@ public abstract class MappedFieldType extends FieldType {
 
     protected MappedFieldType(MappedFieldType ref) {
         super(ref);
-        this.names = ref.names();
+        this.name = ref.name();
         this.boost = ref.boost();
         this.docValues = ref.hasDocValues();
         this.indexAnalyzer = ref.indexAnalyzer();
@@ -214,7 +152,7 @@ public abstract class MappedFieldType extends FieldType {
 
         return boost == fieldType.boost &&
             docValues == fieldType.docValues &&
-            Objects.equals(names, fieldType.names) &&
+            Objects.equals(name, fieldType.name) &&
             Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
             Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
             Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
@@ -226,7 +164,7 @@ public abstract class MappedFieldType extends FieldType {
 
     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
+        return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
             similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString);
     }
 
@@ -238,7 +176,7 @@ public abstract class MappedFieldType extends FieldType {
 
     /** Checks this type is the same type as other. Adds a conflict if they are different.
*/ private final void checkTypeName(MappedFieldType other) { if (typeName().equals(other.typeName()) == false) { - throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); + throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); } else if (getClass() != other.getClass()) { throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName()); } @@ -256,71 +194,68 @@ public abstract class MappedFieldType extends FieldType { boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) { - conflicts.add("mapper [" + names().fullName() + "] has different [index] values"); + conflicts.add("mapper [" + name() + "] has different [index] values"); } if (stored() != other.stored()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store] values"); + conflicts.add("mapper [" + name() + "] has different [store] values"); } if (hasDocValues() == false && other.hasDocValues()) { // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set // when the doc_values field data format is configured - conflicts.add("mapper [" + names().fullName() + "] has different [doc_values] values, cannot change from disabled to enabled"); + conflicts.add("mapper [" + name() + "] has different [doc_values] values, cannot change from disabled to enabled"); } if (omitNorms() && !other.omitNorms()) { - conflicts.add("mapper [" + names().fullName() + "] has different [omit_norms] values, cannot change from disable to enabled"); + conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled"); } if (storeTermVectors() != other.storeTermVectors()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector] values"); } if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_offsets] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values"); } if (storeTermVectorPositions() != other.storeTermVectorPositions()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_positions] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values"); } if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_payloads] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values"); } // null and "default"-named index analyzers both mean the default is used if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) { if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) { 
- conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } - if (!names().indexName().equals(other.names().indexName())) { - conflicts.add("mapper [" + names().fullName() + "] has different [index_name]"); - } if (Objects.equals(similarity(), other.similarity()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [similarity]"); + conflicts.add("mapper [" + name() + "] has different [similarity]"); } if (strict) { if (omitNorms() != other.omitNorms()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types."); } if (boost() != other.boost()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); } if (normsLoading() != other.normsLoading()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types."); } if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); } if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types."); } if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types."); } if (Objects.equals(nullValue(), other.nullValue()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [null_value] across all types.");
             }
         }
     }
@@ -333,13 +268,13 @@ public abstract class MappedFieldType extends FieldType {
         return true;
     }
 
-    public Names names() {
-        return names;
+    public String name() {
+        return name;
     }
 
-    public void setNames(Names names) {
+    public void setName(String name) {
         checkIfFrozen();
-        this.names = names;
+        this.name = name;
     }
 
     public float boost() {
@@ -456,7 +391,7 @@ public abstract class MappedFieldType extends FieldType {
 
     /** Creates a term associated with the field of this mapper for the given value */
     protected Term createTerm(Object value) {
-        return new Term(names().indexName(), indexedValueForSearch(value));
+        return new Term(name(), indexedValueForSearch(value));
     }
 
     public Query termQuery(Object value, @Nullable QueryShardContext context) {
@@ -468,11 +403,11 @@ public abstract class MappedFieldType extends FieldType {
         for (int i = 0; i < bytesRefs.length; i++) {
             bytesRefs[i] = indexedValueForSearch(values.get(i));
         }
-        return new TermsQuery(names.indexName(), bytesRefs);
+        return new TermsQuery(name(), bytesRefs);
     }
 
     public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-        return new TermRangeQuery(names().indexName(),
+        return new TermRangeQuery(name(),
             lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
             upperTerm == null ? null : indexedValueForSearch(upperTerm),
             includeLower, includeUpper);
@@ -492,7 +427,7 @@ public abstract class MappedFieldType extends FieldType {
     public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
         if (numericType() != null) {
-            throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + names.fullName + "]");
+            throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + name + "]");
         }
 
         RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
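With the Names indirection gone, every Lucene query built by the field type is keyed by the one remaining name. A hedged sketch of the same query shapes against the raw Lucene API (the field name and values here are invented for illustration):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermRangeQuery;
    import org.apache.lucene.util.BytesRef;

    public class NameBasedQuerySketch {
        public static void main(String[] args) {
            // The single full name doubles as the Lucene field name.
            String field = "user.address.city";

            // Same shape as the rewritten rangeQuery: null bounds mean open-ended.
            Query range = new TermRangeQuery(field,
                    new BytesRef("a"), new BytesRef("m"),
                    true /* includeLower */, false /* includeUpper */);
            System.out.println(range);

            // createTerm(value) now reduces to new Term(name, indexedBytes).
            Term term = new Term(field, new BytesRef("amsterdam"));
            System.out.println(term);
        }
    }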
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index afaaca1b1ca..59724913bad 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -551,15 +551,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         }
     }
 
-    /**
-     * Returns an {@link MappedFieldType} which has the given index name.
-     *
-     * If multiple types have fields with the same index name, the first is returned.
-     */
-    public MappedFieldType indexName(String indexName) {
-        return fieldTypes.getByIndexName(indexName);
-    }
-
     /**
     * Returns the {@link MappedFieldType} for the given fullName.
     *
@@ -578,32 +569,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             // no wildcards
             return Collections.singletonList(pattern);
         }
-        return fieldTypes.simpleMatchToIndexNames(pattern);
+        return fieldTypes.simpleMatchToFullName(pattern);
     }
 
-    // TODO: remove this since the underlying index names are now the same across all types
-    public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
-        return simpleMatchToIndexNames(pattern);
-    }
-
-    // TODO: remove types param, since the object mapper must be the same across all types
-    public ObjectMapper getObjectMapper(String name, @Nullable String[] types) {
+    public ObjectMapper getObjectMapper(String name) {
         return fullPathObjectMappers.get(name);
     }
 
-    public MappedFieldType smartNameFieldType(String smartName) {
-        MappedFieldType fieldType = fullName(smartName);
-        if (fieldType != null) {
-            return fieldType;
-        }
-        return indexName(smartName);
-    }
-
-    // TODO: remove this since the underlying index names are now the same across all types
-    public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
-        return smartNameFieldType(smartName);
-    }
-
     /**
      * Given a type (eg. long, string, ...), return an anonymous field mapper that can be used for search operations.
     */
@@ -697,7 +669,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
 
         @Override
         protected Analyzer getWrappedAnalyzer(String fieldName) {
-            MappedFieldType fieldType = smartNameFieldType(fieldName);
+            MappedFieldType fieldType = fullName(fieldName);
             if (fieldType != null) {
                 Analyzer analyzer = extractAnalyzer.apply(fieldType);
                 if (analyzer != null) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
index 4fe0eb166c4..3c80106c4f0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
@@ -170,14 +170,14 @@ public class BinaryFieldMapper extends FieldMapper {
             return;
         }
         if (fieldType().stored()) {
-            fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
+            fields.add(new Field(fieldType().name(), value, fieldType()));
         }
 
         if (fieldType().hasDocValues()) {
-            CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName());
+            CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name());
             if (field == null) {
-                field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value);
-                context.doc().addWithKey(fieldType().names().indexName(), field);
+                field = new CustomBinaryDocValuesField(fieldType().name(), value);
+                context.doc().addWithKey(fieldType().name(), field);
             } else {
                 field.add(value);
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
index e381bc9c60b..d5440f227f4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
@@ -222,9 +222,9 @@ public class BooleanFieldMapper extends FieldMapper {
         if (value == null) {
             return;
         }
-        fields.add(new Field(fieldType().names().indexName(), value ? "T" : "F", fieldType()));
+        fields.add(new Field(fieldType().name(), value ?
"T" : "F", fieldType())); if (fieldType().hasDocValues()) { - fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value ? 1 : 0)); + fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 9346ebf6b42..fdc50c25034 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -161,7 +161,7 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : (int)parseValue(lowerTerm), upperTerm == null ? null : (int)parseValue(upperTerm), includeLower, includeUpper); @@ -171,7 +171,7 @@ public class ByteFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { byte iValue = parseValue(value); byte iSim = fuzziness.asByte(); - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -238,7 +238,7 @@ public class ByteFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).byteValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Byte.toString(value), boost); + context.allEntries().addText(fieldType().name(), Byte.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -249,7 +249,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -278,7 +278,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } else { value = (byte) parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index f4060dee821..1e45780cf18 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -326,15 +326,15 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp CompletionFieldType other = (CompletionFieldType)fieldType; if (preservePositionIncrements != other.preservePositionIncrements) { - conflicts.add("mapper [" + names().fullName() + "] has different [preserve_position_increments] values"); + 
conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values"); } if (preserveSep != other.preserveSep) { - conflicts.add("mapper [" + names().fullName() + "] has different [preserve_separators] values"); + conflicts.add("mapper [" + name() + "] has different [preserve_separators] values"); } if (hasContextMappings() != other.hasContextMappings()) { - conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values"); + conflicts.add("mapper [" + name() + "] has different [context_mappings] values"); } else if (hasContextMappings() && contextMappings.equals(other.contextMappings) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values"); + conflicts.add("mapper [" + name() + "] has different [context_mappings] values"); } } @@ -446,7 +446,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp Token token = parser.currentToken(); Map inputMap = new HashMap<>(1); if (token == Token.VALUE_NULL) { - throw new MapperParsingException("completion field [" + fieldType().names().fullName() + "] does not support null values"); + throw new MapperParsingException("completion field [" + fieldType().name() + "] does not support null values"); } else if (token == Token.START_ARRAY) { while ((token = parser.nextToken()) != Token.END_ARRAY) { parse(context, token, parser, inputMap); @@ -469,10 +469,10 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } CompletionInputMetaData metaData = completionInput.getValue(); if (fieldType().hasContextMappings()) { - fieldType().getContextMappings().addField(context.doc(), fieldType().names().indexName(), + fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metaData.weight, metaData.contexts); } else { - context.doc().add(new SuggestField(fieldType().names().indexName(), input, metaData.weight)); + context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight)); } } multiFields.parse(this, context); @@ -536,7 +536,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp weight = weightValue.intValue(); } else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) { if (fieldType().hasContextMappings() == false) { - throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().names().fullName() + "]"); + throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().name() + "]"); } ContextMappings contextMappings = fieldType().getContextMappings(); XContentParser.Token currentToken = parser.currentToken(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 7a99e6b50c0..1bd018f3f6b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -249,7 +249,7 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public String toString(String s) { final StringBuilder sb = new StringBuilder(); - return sb.append(names().indexName()).append(':') + return sb.append(name()).append(':') .append(includeLower ? '[' : '{') .append((lowerTerm == null) ? 
"*" : lowerTerm.toString()) .append(" TO ") @@ -306,13 +306,13 @@ public class DateFieldMapper extends NumberFieldMapper { if (strict) { DateFieldType other = (DateFieldType)fieldType; if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); } if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); } if (Objects.equals(timeUnit(), other.timeUnit()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); } } } @@ -404,7 +404,7 @@ public class DateFieldMapper extends NumberFieldMapper { // not a time format iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -424,7 +424,7 @@ public class DateFieldMapper extends NumberFieldMapper { } private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), includeLower, includeUpper); @@ -516,7 +516,7 @@ public class DateFieldMapper extends NumberFieldMapper { Long value = null; if (dateAsString != null) { if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), dateAsString, boost); + context.allEntries().addText(fieldType().name(), dateAsString, boost); } value = fieldType().parseStringValue(dateAsString); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 861d33e560e..18fd27883cd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -164,7 +164,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), lowerTerm == null ? 
null : parseDoubleValue(lowerTerm), upperTerm == null ? null : parseDoubleValue(upperTerm), includeLower, includeUpper); @@ -174,7 +174,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { double iValue = parseDoubleValue(value); double iSim = fuzziness.asDouble(); - return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -230,7 +230,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).doubleValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Double.toString(value), boost); + context.allEntries().addText(fieldType().name(), Double.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -241,7 +241,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -270,7 +270,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { } else { value = parser.doubleValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index ad88c745dfd..4f8477b5f90 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -165,7 +165,7 @@ public class FloatFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? 
null : parseValue(upperTerm), includeLower, includeUpper); @@ -175,7 +175,7 @@ public class FloatFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { float iValue = parseValue(value); final float iSim = fuzziness.asFloat(); - return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -242,7 +242,7 @@ public class FloatFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).floatValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Float.toString(value), boost); + context.allEntries().addText(fieldType().name(), Float.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -253,7 +253,7 @@ public class FloatFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -282,7 +282,7 @@ public class FloatFieldMapper extends NumberFieldMapper { } else { value = parser.floatValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 67f6a5e21b3..5fda6045928 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -170,7 +170,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? 
null : parseValue(upperTerm), includeLower, includeUpper); @@ -180,7 +180,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { int iValue = parseValue(value); int iSim = fuzziness.asInt(); - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -247,7 +247,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).intValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Integer.toString(value), boost); + context.allEntries().addText(fieldType().name(), Integer.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -258,7 +258,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -287,7 +287,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } else { value = parser.intValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index a19079c3db9..02cfa7f81e7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -168,7 +168,7 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseLongValue(lowerTerm), upperTerm == null ? 
null : parseLongValue(upperTerm), includeLower, includeUpper); @@ -178,7 +178,7 @@ public class LongFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { long iValue = parseLongValue(value); final long iSim = fuzziness.asLong(); - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -235,7 +235,7 @@ public class LongFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).longValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Long.toString(value), boost); + context.allEntries().addText(fieldType().name(), Long.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -246,7 +246,7 @@ public class LongFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -275,7 +275,7 @@ public class LongFieldMapper extends NumberFieldMapper { } else { value = parser.longValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 618114685ae..a0a5e5e5bce 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -144,7 +144,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM List conflicts, boolean strict) { super.checkCompatibility(other, conflicts, strict); if (numericPrecisionStep() != other.numericPrecisionStep()) { - conflicts.add("mapper [" + names().fullName() + "] has different [precision_step] values"); + conflicts.add("mapper [" + name() + "] has different [precision_step] values"); } } @@ -243,7 +243,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM protected abstract void innerParseCreateField(ParseContext context, List fields) throws IOException; protected final void addDocValue(ParseContext context, List fields, long value) { - fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value)); + fields.add(new SortedNumericDocValuesField(fieldType().name(), value)); } /** @@ -329,7 +329,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM }; public CustomNumericField(Number value, MappedFieldType fieldType) { - super(fieldType.names().indexName(), fieldType); + super(fieldType.name(), fieldType); if (value != null) { this.fieldsData = value; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index 017fd6493c5..7c232c46fb5 
100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -166,7 +166,7 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : (int)parseValue(lowerTerm), upperTerm == null ? null : (int)parseValue(upperTerm), includeLower, includeUpper); @@ -176,7 +176,7 @@ public class ShortFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { short iValue = parseValue(value); short iSim = fuzziness.asShort(); - return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -243,7 +243,7 @@ public class ShortFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).shortValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), Short.toString(value), boost); + context.allEntries().addText(fieldType().name(), Short.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -254,7 +254,7 @@ public class ShortFieldMapper extends NumberFieldMapper { } value = fieldType().nullValue(); if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost); + context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -283,7 +283,7 @@ public class ShortFieldMapper extends NumberFieldMapper { } else { value = parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost); + context.allEntries().addText(fieldType().name(), parser.text(), boost); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 0762c9a6698..f8f9cb25ba1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -248,7 +248,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { - throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values"); + throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); } this.positionIncrementGap = positionIncrementGap; this.ignoreAbove = ignoreAbove; @@ -315,19 +315,19 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc return; } if 
(context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), valueAndBoost.value(), valueAndBoost.boost()); + context.allEntries().addText(fieldType().name(), valueAndBoost.value(), valueAndBoost.boost()); } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().names().indexName(), valueAndBoost.value(), fieldType()); + Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); field.setBoost(valueAndBoost.boost()); fields.add(field); } if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(valueAndBoost.value()))); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); } if (fields.isEmpty()) { - context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType().name(), valueAndBoost.value()); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index a485c3727fc..1fd1debe60c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -148,7 +148,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { addIntegerFields(context, fields, count, valueAndBoost.boost()); } if (fields.isEmpty()) { - context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType().name(), valueAndBoost.value()); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 00d39791c9c..3ebba17d65c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -241,10 +241,7 @@ public class TypeParsers { Map.Entry entry = iterator.next(); final String propName = Strings.toUnderscoreCase(entry.getKey()); final Object propNode = entry.getValue(); - if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { - builder.indexName(propNode.toString()); - iterator.remove(); - } else if (propName.equals("store")) { + if (propName.equals("store")) { builder.store(parseStore(name, propNode.toString())); iterator.remove(); } else if (propName.equals("index")) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 79261ec3809..11683ee9a8d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -287,20 +287,20 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr super.checkCompatibility(fieldType, conflicts, strict); GeoPointFieldType other = (GeoPointFieldType)fieldType; if (isLatLonEnabled() != other.isLatLonEnabled()) { - conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]"); + conflicts.add("mapper [" + name() + "] has different [lat_lon]"); } if (isLatLonEnabled() && other.isLatLonEnabled() && latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) 
{
- conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
+ conflicts.add("mapper [" + name() + "] has different [precision_step]");
}
if (isGeoHashEnabled() != other.isGeoHashEnabled()) {
- conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
+ conflicts.add("mapper [" + name() + "] has different [geohash]");
}
if (geoHashPrecision() != other.geoHashPrecision()) {
- conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
+ conflicts.add("mapper [" + name() + "] has different [geohash_precision]");
}
if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) {
- conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
+ conflicts.add("mapper [" + name() + "] has different [geohash_prefix]");
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
index fa61669e800..450c987ae54 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
@@ -123,7 +123,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
GeoUtils.normalizePoint(point);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
- context.doc().add(new GeoPointField(fieldType().names().indexName(), point.lon(), point.lat(), fieldType() ));
+ context.doc().add(new GeoPointField(fieldType().name(), point.lon(), point.lat(), fieldType() ));
}
super.parse(context, point, geoHash);
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
index 735baa88533..589abd99191 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
@@ -301,7 +301,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
if (gpfmMergeWith.coerce.explicit()) {
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
- throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
+ throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]");
}
}
@@ -330,17 +330,17 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
- Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
+ Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
super.parse(context, point, geoHash);
if (fieldType().hasDocValues()) {
- CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
+ CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().name());
if (field == null) {
- field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
- context.doc().addWithKey(fieldType().names().indexName(), field);
+ field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon());
+ context.doc().addWithKey(fieldType().name(), field);
} else {
field.add(point.lat(), point.lon());
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
index 1ba49e64d80..9cb38e38a62 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
@@ -105,7 +105,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
static {
// setting name here is a hack so freeze can be called...instead all these options should be
// moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers...
- FIELD_TYPE.setNames(new MappedFieldType.Names("DoesNotExist"));
+ FIELD_TYPE.setName("DoesNotExist");
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
@@ -278,10 +278,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
}
- recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, names().indexName());
+ recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name());
recursiveStrategy.setDistErrPct(distanceErrorPct());
recursiveStrategy.setPruneLeafyBranches(false);
- termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, names().indexName());
+ termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name());
termStrategy.setDistErrPct(distanceErrorPct());
defaultStrategy = resolveStrategy(strategyName);
defaultStrategy.setPointsOnly(pointsOnly);
@@ -293,33 +293,33 @@ public class GeoShapeFieldMapper extends FieldMapper {
GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
// prevent user from changing strategies
if (strategyName().equals(other.strategyName()) == false) {
- conflicts.add("mapper [" + names().fullName() + "] has different [strategy]");
+ conflicts.add("mapper [" + name() + "] has different [strategy]");
}
// prevent user from changing trees (changes encoding)
if (tree().equals(other.tree()) == false) {
- conflicts.add("mapper [" + names().fullName() + "] has different [tree]");
+ conflicts.add("mapper [" + name() + "] has different [tree]");
}
if ((pointsOnly() != other.pointsOnly())) {
- conflicts.add("mapper [" + names().fullName() + "] has different points_only");
+ conflicts.add("mapper [" + name() + "] has different points_only");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (treeLevels() != other.treeLevels()) {
- conflicts.add("mapper [" + names().fullName() + "] has different [tree_levels]");
+ conflicts.add("mapper [" + name() + "] has different [tree_levels]");
}
if (precisionInMeters() != other.precisionInMeters()) {
- conflicts.add("mapper [" + names().fullName() + "] has different [precision]");
+ conflicts.add("mapper [" + name() + "] has different [precision]");
}
if (strict) {
if (orientation() != other.orientation()) {
- conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types.");
+ conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types.");
}
if (distanceErrorPct() != other.distanceErrorPct()) {
- conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types.");
+ conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types.");
}
}
}
@@ -450,7 +450,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
shape = shapeBuilder.build();
}
if (fieldType().pointsOnly() && !(shape instanceof Point)) {
- throw new MapperParsingException("[{" + fieldType().names().fullName() + "}] is configured for points only but a " +
+ throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " +
((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found");
}
Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape);
@@ -464,7 +464,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
context.doc().add(field);
}
} catch (Exception e) {
- throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
+ throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
}
return null;
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
index bcd094d2ae6..d370cbe9645 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
@@ -91,7 +91,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.setTokenized(true);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@@ -246,7 +246,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
// reset the entries
context.allEntries().reset();
Analyzer analyzer = findAnalyzer(context);
- fields.add(new AllField(fieldType().names().indexName(), context.allEntries(), analyzer, fieldType()));
+ fields.add(new AllField(fieldType().name(), context.allEntries(), analyzer, fieldType()));
}
private Analyzer findAnalyzer(ParseContext context) {
@@ -323,7 +323,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
- throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
+ throw new IllegalArgumentException("mapper [" + fieldType().name() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
}
super.doMerge(mergeWith, updateAllTypes);
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
index d8d61f4bab3..cd4fdc533e4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
@@ -69,7 +69,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@@ -175,7 +175,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
if (strict) {
FieldNamesFieldType other = (FieldNamesFieldType)fieldType;
if (isEnabled() != other.isEnabled()) {
- conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
+ conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
}
}
}
@@ -290,7 +290,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
- document.add(new Field(fieldType().names().indexName(), fieldName, fieldType()));
+ document.add(new Field(fieldType().name(), fieldName, fieldType()));
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
index 0fe3e10bcb8..a342f5b2180 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
@@ -77,7 +77,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@@ -285,10 +285,10 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
// else we are in the pre/post parse phase
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
- fields.add(new Field(fieldType().names().indexName(), context.id(), fieldType()));
+ fields.add(new Field(fieldType().name(), context.id(), fieldType()));
}
if (fieldType().hasDocValues()) {
- fields.add(new BinaryDocValuesField(fieldType().names().indexName(), new BytesRef(context.id())));
+ fields.add(new BinaryDocValuesField(fieldType().name(), new BytesRef(context.id())));
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java
index dbbf03b72e2..52a2e97508c 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java
@@ -67,7 +67,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@@ -223,7 +223,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
}
public String value(Document document) {
- Field field = (Field) document.getField(fieldType().names().indexName());
+ Field field = (Field) document.getField(fieldType().name());
return field == null ? null : (String)fieldType().value(field);
}
@@ -247,7 +247,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
if (!enabledState.enabled) {
return;
}
- fields.add(new Field(fieldType().names().indexName(), context.index(), fieldType()));
+ fields.add(new Field(fieldType().name(), context.index(), fieldType()));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
index 65daef2a834..4325dfc8a96 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
@@ -75,7 +75,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
JOIN_FIELD_TYPE.setHasDocValues(true);
@@ -120,9 +120,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {
if (parentType == null) {
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
}
- parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(documentType)));
+ parentJoinFieldType.setName(joinField(documentType));
parentJoinFieldType.setFieldDataType(null);
- childJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
+ childJoinFieldType.setName(joinField(parentType));
return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
}
}
@@ -242,7 +242,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
}
}
- return new TermsQuery(names().indexName(), bValues);
+ return new TermsQuery(name(), bValues);
}
}
@@ -269,7 +269,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
- parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
+ parentJoinFieldType.setName(joinField(parentType));
parentJoinFieldType.freeze();
return parentJoinFieldType;
}
@@ -312,7 +312,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
- fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
+ fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else {
// otherwise, we are running it post processing of the xcontent
@@ -324,7 +324,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
- fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
+ fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
@@ -336,7 +336,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private void addJoinFieldIfNeeded(List fields, MappedFieldType fieldType, String id) {
if (fieldType.hasDocValues()) {
- fields.add(new SortedDocValuesField(fieldType.names().indexName(), new BytesRef(id)));
+ fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id)));
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
index 40b7e6871c4..4fb410dbdc8 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
@@ -62,7 +62,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@@ -179,7 +179,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
}
public String value(Document document) {
- Field field = (Field) document.getField(fieldType().names().indexName());
+ Field field = (Field) document.getField(fieldType().name());
return field == null ? null : (String)fieldType().value(field);
}
@@ -206,10 +206,10 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
String routing = context.sourceToParse().routing();
if (routing != null) {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
- context.ignoredValue(fieldType().names().indexName(), routing);
+ context.ignoredValue(fieldType().name(), routing);
return;
}
- fields.add(new Field(fieldType().names().indexName(), routing, fieldType()));
+ fields.add(new Field(fieldType().name(), routing, fieldType()));
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
index 40bf9eb0c8e..b0de09edafb 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
@@ -74,7 +74,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@@ -272,7 +272,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
if (!source.hasArray()) {
source = source.toBytesArray();
}
- fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length()));
+ fields.add(new StoredField(fieldType().name(), source.array(), source.arrayOffset(), source.length()));
}
@Override
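Every metadata mapper above receives the same mechanical change: the composite MappedFieldType.Names value, which used to distinguish the Lucene index name from the full dotted path name, collapses into a single String, since the two are always equal by this point. A minimal before/after sketch of the static-initializer pattern (FIELD_TYPE and NAME stand in for the constants of any one of the mappers touched here):

    static {
        // before: the name was wrapped in a composite Names object
        // FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
        // after: a plain String carries the one remaining name
        FIELD_TYPE.setName(NAME);
        FIELD_TYPE.freeze();
    }
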
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
index f99ca18600a..4612b9fb85f 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
@@ -64,7 +64,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
TTL_FIELD_TYPE.setIndexAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_64_BIT));
TTL_FIELD_TYPE.setSearchAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE));
- TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ TTL_FIELD_TYPE.setName(NAME);
TTL_FIELD_TYPE.freeze();
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
index 9fdb9b586e4..3771747bf66 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
@@ -67,7 +67,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setStored(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER);
FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT));
FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE));
@@ -313,13 +313,13 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (enabledState.enabled) {
long timestamp = context.sourceToParse().timestamp();
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
- context.ignoredValue(fieldType().names().indexName(), String.valueOf(timestamp));
+ context.ignoredValue(fieldType().name(), String.valueOf(timestamp));
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
fields.add(new LongFieldMapper.CustomLongNumericField(timestamp, fieldType()));
}
if (fieldType().hasDocValues()) {
- fields.add(new NumericDocValuesField(fieldType().names().indexName(), timestamp));
+ fields.add(new NumericDocValuesField(fieldType().name(), timestamp));
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java
index 15fbd6fd123..05c13d01339 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java
@@ -70,7 +70,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@@ -84,7 +84,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
@Override
public TypeFieldMapper build(BuilderContext context) {
- fieldType.setNames(buildNames(context));
+ fieldType.setName(buildFullName(context));
return new TypeFieldMapper(fieldType, context.indexSettings());
}
}
@@ -186,9 +186,9 @@ public class TypeFieldMapper extends MetadataFieldMapper {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
return;
}
- fields.add(new Field(fieldType().names().indexName(), context.type(), fieldType()));
+ fields.add(new Field(fieldType().name(), context.type(), fieldType()));
if (fieldType().hasDocValues()) {
- fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(context.type())));
+ fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.type())));
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
index 10f9880d97d..532263b1506 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
@@ -66,7 +66,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
NESTED_FIELD_TYPE = FIELD_TYPE.clone();
@@ -193,7 +193,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
}
public Term term(String uid) {
- return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid));
+ return new Term(fieldType().name(), fieldType().indexedValueForSearch(uid));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java
index 6b1471afda7..e9577b9d509 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java
@@ -51,7 +51,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
public static final MappedFieldType FIELD_TYPE = new VersionFieldType();
static {
- FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+ FIELD_TYPE.setName(NAME);
FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.freeze();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
index 598e1d365fd..9984463ffc0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
@@ -229,7 +229,7 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
- return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+ return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
@@ -244,7 +244,7 @@ public class IpFieldMapper extends NumberFieldMapper {
} catch (IllegalArgumentException e) {
iSim = fuzziness.asLong();
}
- return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+ return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@@ -287,7 +287,7 @@ public class IpFieldMapper extends NumberFieldMapper {
return;
}
if (context.includeInAll(includeInAll, this)) {
- context.allEntries().addText(fieldType().names().fullName(), ipAsString, fieldType().boost());
+ context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost());
}
final long value = ipToLong(ipAsString);
diff --git a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java b/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java
index 26b52f773e9..c79c7d7da25 100644
--- a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java
+++ b/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java
@@ -54,7 +54,7 @@ final class QueriesLoaderCollector extends SimpleCollector {
QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger, MapperService mapperService, IndexFieldDataService indexFieldDataService) {
this.percolator = percolator;
this.logger = logger;
- final MappedFieldType uidMapper = mapperService.smartNameFieldType(UidFieldMapper.NAME);
+ final MappedFieldType uidMapper = mapperService.fullName(UidFieldMapper.NAME);
this.uidFieldData = indexFieldDataService.getForField(uidMapper);
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
index 20d0b62b725..550ffe89882 100644
--- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
@@ -235,7 +235,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
- f = fieldType.names().indexName();
+ f = fieldType.name();
} else {
f = field;
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java
index e9258d7cfc1..9ce592ca4cf 100644
--- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java
@@ -87,7 +87,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder {
MappedFieldType fieldType = context.fieldMapper(fieldName);
String indexFieldName;
if (fieldType != null) {
- indexFieldName = fieldType.names().indexName();
+ indexFieldName = fieldType.name();
} else {
indexFieldName = fieldName;
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java
index 7c3cc1c30a3..314bc6faeb2 100644
--- a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java
@@ -118,7 +118,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder ext
@Override
protected String getFieldName() {
- return fieldData.getFieldNames().fullName();
+ return fieldData.getFieldName();
}
@Override
@@ -450,7 +450,7 @@ public abstract class DecayFunctionBuilder ext
@Override
protected String getFieldName() {
- return fieldData.getFieldNames().fullName();
+ return fieldData.getFieldName();
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java
index 4a73d4b2bec..d686e78635b 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java
@@ -148,7 +148,7 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder());
}
diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java
index ea8ff8ca6d3..6f92e411c00 100644
--- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java
@@ -123,7 +123,7 @@ public class GeoDistanceRangeQuery extends Query {
}
public String fieldName() {
- return indexFieldData.getFieldNames().indexName();
+ return indexFieldData.getFieldName();
}
@Override
@@ -198,7 +198,7 @@ public class GeoDistanceRangeQuery extends Query {
if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false;
if (Double.compare(filter.lat, lat) != 0) return false;
if (Double.compare(filter.lon, lon) != 0) return false;
- if (!indexFieldData.getFieldNames().indexName().equals(filter.indexFieldData.getFieldNames().indexName()))
+ if (!indexFieldData.getFieldName().equals(filter.indexFieldData.getFieldName()))
return false;
if (geoDistance != filter.geoDistance) return false;
@@ -207,7 +207,7 @@ public class GeoDistanceRangeQuery extends Query {
@Override
public String toString(String field) {
- return "GeoDistanceRangeQuery(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
+ return "GeoDistanceRangeQuery(" + indexFieldData.getFieldName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
}
@Override
@@ -223,7 +223,7 @@ public class GeoDistanceRangeQuery extends Query {
temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L;
result = 31 * result + Long.hashCode(temp);
result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0);
- result = 31 * result + indexFieldData.getFieldNames().indexName().hashCode();
+ result = 31 * result + indexFieldData.getFieldName().hashCode();
return result;
}
diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java
index 71e369cce0b..d62aa76efd9 100644
--- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java
@@ -51,7 +51,7 @@ public class GeoPolygonQuery extends Query {
}
public String fieldName() {
- return indexFieldData.getFieldNames().indexName();
+ return indexFieldData.getFieldName();
}
@Override
@@ -104,7 +104,7 @@ public class GeoPolygonQuery extends Query {
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder("GeoPolygonQuery(");
- sb.append(indexFieldData.getFieldNames().indexName());
+ sb.append(indexFieldData.getFieldName());
sb.append(", ").append(Arrays.toString(points)).append(')');
return sb.toString();
}
@@ -115,14 +115,14 @@ public class GeoPolygonQuery extends Query {
return false;
}
GeoPolygonQuery that = (GeoPolygonQuery) obj;
- return indexFieldData.getFieldNames().indexName().equals(that.indexFieldData.getFieldNames().indexName())
+ return indexFieldData.getFieldName().equals(that.indexFieldData.getFieldName())
&& Arrays.equals(points, that.points);
}
@Override
public int hashCode() {
int h = super.hashCode();
- h = 31 * h + indexFieldData.getFieldNames().indexName().hashCode();
+ h = 31 * h + indexFieldData.getFieldName().hashCode();
h = 31 * h + Arrays.hashCode(points);
return h;
}
diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java
index a2e9e1b689d..2f2801a2abe 100644
--- a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java
@@ -57,7 +57,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query {
}
public String fieldName() {
- return indexFieldData.getFieldNames().indexName();
+ return indexFieldData.getFieldName();
}
@Override
@@ -79,7 +79,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query {
@Override
public String toString(String field) {
- return "GeoBoundingBoxFilter(" + indexFieldData.getFieldNames().indexName() + ", " + topLeft + ", " + bottomRight + ")";
+ return "GeoBoundingBoxFilter(" + indexFieldData.getFieldName() + ", " + topLeft + ", " + bottomRight + ")";
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java
index 43fe1441ced..436fc80d521 100644
--- a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java
@@ -32,7 +32,7 @@ public class IndexedGeoBoundingBoxQuery {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
if (!fieldType.isLatLonEnabled()) {
- throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.names().fullName() + "], can't use indexed filter on it");
+ throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() + "], can't use indexed filter on it");
}
//checks to see if bounding box crosses 180 degrees
if (topLeft.lon() > bottomRight.lon()) {
diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
index 1d08683f47b..67cc3b764e3 100644
--- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
+++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
@@ -129,7 +129,7 @@ public final class SimilarityService extends AbstractIndexComponent {
@Override
public Similarity get(String name) {
- MappedFieldType fieldType = mapperService.smartNameFieldType(name);
+ MappedFieldType fieldType = mapperService.fullName(name);
return (fieldType != null && fieldType.similarity() != null) ? fieldType.similarity().get() : defaultSimilarity;
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
index 0a8a8a7addf..17777756056 100644
--- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
+++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
@@ -188,7 +188,7 @@ public class TermVectorsService {
/* only keep valid fields */
Set validFields = new HashSet<>();
for (String field : selectedFields) {
- MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field);
+ MappedFieldType fieldType = indexShard.mapperService().fullName(field);
if (!isValidField(fieldType)) {
continue;
}
@@ -223,7 +223,7 @@ public class TermVectorsService {
if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString());
} else {
- analyzer = mapperService.smartNameFieldType(field).indexAnalyzer();
+ analyzer = mapperService.fullName(field).indexAnalyzer();
}
if (analyzer == null) {
analyzer = mapperService.analysisService().defaultIndexAnalyzer();
@@ -269,7 +269,7 @@ public class TermVectorsService {
Set seenFields = new HashSet<>();
Collection getFields = new HashSet<>();
for (IndexableField field : doc.getFields()) {
- MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field.name());
+ MappedFieldType fieldType = indexShard.mapperService().fullName(field.name());
if (!isValidField(fieldType)) {
continue;
}
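On the lookup side the rename is just as uniform: callers that resolved a field through MapperService.smartNameFieldType(...) now call fullName(...), which resolves the field by its full path name. A hedged sketch of the call-site pattern, assuming a mapperService and a field name in scope as in the TermVectorsService hunks above:

    // was: mapperService.smartNameFieldType(field)
    MappedFieldType fieldType = mapperService.fullName(field);
    if (fieldType != null) {
        // e.g. pick the analyzer the field was indexed with
        Analyzer analyzer = fieldType.indexAnalyzer();
    }
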
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
index 73095f8ee5d..9181c6248ea 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
@@ -41,7 +41,6 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.threadpool.ThreadPool;
@@ -91,8 +90,8 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
this.closed = true;
}
- public IndexFieldDataCache buildIndexFieldDataCache(IndexFieldDataCache.Listener listener, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) {
- return new IndexFieldCache(logger, cache, index, fieldNames, fieldDataType, indicesFieldDataCacheListener, listener);
+ public IndexFieldDataCache buildIndexFieldDataCache(IndexFieldDataCache.Listener listener, Index index, String fieldName, FieldDataType fieldDataType) {
+ return new IndexFieldCache(logger, cache, index, fieldName, fieldDataType, indicesFieldDataCacheListener, listener);
}
public Cache getCache() {
@@ -107,7 +106,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final Accountable value = notification.getValue();
for (IndexFieldDataCache.Listener listener : key.listeners) {
try {
- listener.onRemoval(key.shardId, indexCache.fieldNames, indexCache.fieldDataType, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
+ listener.onRemoval(key.shardId, indexCache.fieldName, indexCache.fieldDataType, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on field data cache unloading", e);
@@ -129,16 +128,16 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
static class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener, IndexReader.ReaderClosedListener {
private final ESLogger logger;
final Index index;
- final MappedFieldType.Names fieldNames;
+ final String fieldName;
final FieldDataType fieldDataType;
private final Cache cache;
private final Listener[] listeners;
- IndexFieldCache(ESLogger logger,final Cache cache, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Listener... listeners) {
+ IndexFieldCache(ESLogger logger,final Cache cache, Index index, String fieldName, FieldDataType fieldDataType, Listener... listeners) {
this.logger = logger;
this.listeners = listeners;
this.index = index;
- this.fieldNames = fieldNames;
+ this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.cache = cache;
}
@@ -156,7 +155,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final AtomicFieldData fieldData = indexFieldData.loadDirect(context);
for (Listener listener : k.listeners) {
try {
- listener.onCache(shardId, fieldNames, fieldDataType, fieldData);
+ listener.onCache(shardId, fieldName, fieldDataType, fieldData);
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on atomic field data loading", e);
@@ -180,7 +179,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final Accountable ifd = (Accountable) indexFieldData.localGlobalDirect(indexReader);
for (Listener listener : k.listeners) {
try {
- listener.onCache(shardId, fieldNames, fieldDataType, ifd);
+ listener.onCache(shardId, fieldName, fieldDataType, ifd);
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on global ordinals loading", e);
@@ -218,7 +217,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
public void clear(String fieldName) {
for (Key key : cache.keys()) {
if (key.indexCache.index.equals(index)) {
- if (key.indexCache.fieldNames.fullName().equals(fieldName)) {
+ if (key.indexCache.fieldName.equals(fieldName)) {
cache.invalidate(key);
}
}
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
index 06257806af5..c37cf6def79 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
@@ -24,7 +24,6 @@ import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@@ -44,11 +43,11 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen
}
@Override
- public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) {
+ public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable fieldData) {
}
@Override
- public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+ public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]";
circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
index a7f931129fe..4f48e4f7b93 100644
--- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
+++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
@@ -196,7 +196,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent shardsToPurge) {
for (IndexShard shardToPurge : shardsToPurge) {
- Query query = shardToPurge.mapperService().smartNameFieldType(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true);
+ Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true);
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
try {
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
index 7e2103fff93..267ea7a50bf 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
@@ -637,12 +637,7 @@ public class PercolateContext extends SearchContext {
@Override
public MappedFieldType smartNameFieldType(String name) {
- return mapperService().smartNameFieldType(name, types);
- }
-
- @Override
- public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
- return mapperService().smartNameFieldType(name);
+ return mapperService().fullName(name);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
index 0f0b571ff2d..8cc691b866b 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
@@ -760,7 +760,7 @@ public class PercolatorService extends AbstractComponent {
hls = new ArrayList<>(topDocs.scoreDocs.length);
}
- final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
+ final MappedFieldType uidMapper = context.mapperService().fullName(UidFieldMapper.NAME);
final IndexFieldData uidFieldData = context.fieldData().getForField(uidMapper);
int i = 0;
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
diff --git a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java
index 33a8dc813fe..828ff4f08e4 100644
--- a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java
+++ b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java
@@ -73,7 +73,7 @@ abstract class QueryCollector extends SimpleCollector {
this.logger = logger;
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
- final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
+ final MappedFieldType uidMapper = context.mapperService().fullName(UidFieldMapper.NAME);
this.uidFieldData = context.fieldData().getForField(uidMapper);
this.isNestedDoc = isNestedDoc;
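With MappedFieldType.Names gone, the field data cache callbacks above shrink to a plain field name as well. A minimal sketch of a listener against the new signature (the class name NoopFieldDataListener is illustrative only; the bodies mirror IndicesFieldDataCacheListener above):

    public class NoopFieldDataListener implements IndexFieldDataCache.Listener {
        @Override
        public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable fieldData) {
            // accounting (e.g. a circuit breaker adjustment) would hook in here
        }

        @Override
        public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
            // release whatever was accounted in onCache
        }
    }
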
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java
index 6bfd3f08a33..02efa373ab0 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchService.java
@@ -971,7 +971,7 @@ public class SearchService extends AbstractLifecycleComponent imp
final ObjectSet warmUp = new ObjectHashSet<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
- final String indexName = fieldMapper.fieldType().names().indexName();
+ final String indexName = fieldMapper.fieldType().name();
Loading normsLoading = fieldMapper.fieldType().normsLoading();
if (normsLoading == null) {
normsLoading = defaultLoading;
@@ -1047,10 +1047,10 @@ public class SearchService extends AbstractLifecycleComponent imp
fieldDataType = joinFieldType.fieldDataType();
// TODO: this can be removed in 3.0 when the old parent/child impl is removed:
// related to: https://github.com/elastic/elasticsearch/pull/12418
- indexName = fieldMapper.fieldType().names().indexName();
+ indexName = fieldMapper.fieldType().name();
} else {
fieldDataType = fieldMapper.fieldType().fieldDataType();
- indexName = fieldMapper.fieldType().names().indexName();
+ indexName = fieldMapper.fieldType().name();
}
if (fieldDataType == null) {
@@ -1079,10 +1079,10 @@ public class SearchService extends AbstractLifecycleComponent imp
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldType).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
- indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
+ indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.name(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
- indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.names().fullName());
+ indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.name());
} finally {
latch.countDown();
}
@@ -1115,10 +1115,10 @@ public class SearchService extends AbstractLifecycleComponent imp
fieldDataType = joinFieldType.fieldDataType();
// TODO: this can be removed in 3.0 when the old parent/child impl is removed:
// related to: https://github.com/elastic/elasticsearch/pull/12418
- indexName = fieldMapper.fieldType().names().indexName();
+ indexName = fieldMapper.fieldType().name();
} else {
fieldDataType = fieldMapper.fieldType().fieldDataType();
- indexName = fieldMapper.fieldType().names().indexName();
+ indexName = fieldMapper.fieldType().name();
}
if (fieldDataType == null) {
continue;
@@ -1144,10 +1144,10 @@ public class SearchService extends AbstractLifecycleComponent imp
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
ifd.loadGlobal(searcher.getDirectoryReader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
- indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
+ indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.name(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
- indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.names().fullName());
+ indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.name());
} finally {
latch.countDown();
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
index 2c597bdcee9..438e872be77 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
@@ -88,7 +88,7 @@ public class ChildrenParser implements Aggregator.Parser {
parentFilter = parentDocMapper.typeFilter();
childFilter = childDocMapper.typeFilter();
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper.fieldType());
- config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
+ config.fieldContext(new FieldContext(parentFieldMapper.fieldType().name(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
} else {
config.unmapped(true);
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
index a9dcc77ee9f..fced5fdc913 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
@@ -191,7 +191,7 @@ public class ValuesSourceParser {
return config;
}
- MappedFieldType fieldType = context.smartNameFieldTypeFromAnyType(input.field);
+ MappedFieldType fieldType = context.smartNameFieldType(input.field);
if (fieldType == null) {
Class valuesSourceType = valueType != null ? (Class) valueType.getValuesSourceType() : this.valuesSourceType;
ValuesSourceConfig config = new ValuesSourceConfig<>(valuesSourceType);
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java
index c74ef7b0c73..16b1311809c 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java
@@ -94,7 +94,7 @@ public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2));
hitContext.hit().fields().put(field.name(), hitField);
}
- MappedFieldType fieldType = context.mapperService().smartNameFieldType(field.name());
+ MappedFieldType fieldType = context.mapperService().fullName(field.name());
if (fieldType != null) {
AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
ScriptDocValues values = data.getScriptValues();
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
index b57899b2e17..51c56e676c1 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
@@ -151,10 +151,10 @@ public class FastVectorHighlighter implements Highlighter {
// we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible
// Only send matched fields if they were requested to save time.
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) {
- fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize,
+ fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize,
numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
} else {
- fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fragmentCharSize,
+ fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), fragmentCharSize,
numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
}
@@ -167,7 +167,7 @@ public class FastVectorHighlighter implements Highlighter {
// Essentially we just request that a fragment is built from 0 to noMatchSize using the normal fragmentsBuilder
FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/);
fieldFragList.add(0, noMatchSize, Collections.emptyList());
- fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(),
+ fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().name(),
fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
if (fragments != null && fragments.length > 0) {
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java
index db64af82398..d4095c14ec0 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java
@@ -48,9 +48,9 @@ public final class HighlightUtils {
boolean forceSource = searchContext.highlight().forceSource(field);
List textsToHighlight;
if (!forceSource && mapper.fieldType().stored()) {
- CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().names().indexName()), false);
+ CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().name()), false);
hitContext.reader().document(hitContext.docId(), fieldVisitor);
- textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().names().indexName());
+ textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().name());
if (textsToHighlight == null) {
// Can happen if the document doesn't have the field to highlight
textsToHighlight = Collections.emptyList();
@@ -58,7 +58,7 @@ public final class HighlightUtils {
} else {
SourceLookup sourceLookup = searchContext.lookup().source();
sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
- textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName()));
+ textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name()));
}
assert textsToHighlight != null;
return textsToHighlight;
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
index 5f4cdddb060..4bd27e11795 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
@@ -72,7 +72,7 @@ public class PlainHighlighter implements Highlighter {
         org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper);
         if (entry == null) {
-            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().names().indexName() : null);
+            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null);
             queryScorer.setExpandMultiTermQuery(true);
             Fragmenter fragmenter;
             if (field.fieldOptions().numberOfFragments() == 0) {
@@ -108,7 +108,7 @@ public class PlainHighlighter implements Highlighter {
             for (Object textToHighlight : textsToHighlight) {
                 String text = textToHighlight.toString();
-                try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text)) {
+                try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) {
                     if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) {
                         // can't perform highlighting if the stream has no terms (binary token stream) or no offsets
                         continue;
@@ -165,7 +165,7 @@ public class PlainHighlighter implements Highlighter {
         String fieldContents = textsToHighlight.get(0).toString();
         int end;
         try {
-            end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().names().indexName(), fieldContents);
+            end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().name(), fieldContents);
         } catch (Exception e) {
             throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
         }
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
index 2509f95da59..51c460c5c68 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
@@ -93,7 +93,7 @@ public class PostingsHighlighter implements Highlighter {
             }
             IndexSearcher searcher = new IndexSearcher(hitContext.reader());
-            Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments);
+            Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments);
             for (Snippet fieldSnippet : fieldSnippets) {
                 if (Strings.hasText(fieldSnippet.getText())) {
                     snippets.add(fieldSnippet);
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
index eb17daec5f1..363a3b9137d 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
@@ -55,7 +55,7 @@ public final class FragmentBuilderHelper {
      */
     public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) {
         assert fragInfo != null : "FragInfo must not be null";
-        assert mapper.fieldType().names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name();
+        assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name();
         if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.fieldType().indexAnalyzer()))) {
             /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time
              * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java
index af914cebf6c..2d226aabf94 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java
@@ -59,10 +59,10 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder
         SourceLookup sourceLookup = searchContext.lookup().source();
         sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
-        List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName()));
+        List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name()));
         Field[] fields = new Field[values.size()];
         for (int i = 0; i < values.size(); i++) {
-            fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
+            fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
         }
         return fields;
     }
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java
index 222f00a5597..b80c239376e 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java
@@ -55,13 +55,13 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder
         SourceLookup sourceLookup = searchContext.lookup().source();
         sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
-        List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName()));
+        List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name()));
         if (values.isEmpty()) {
             return EMPTY_FIELDS;
         }
         Field[] fields = new Field[values.size()];
         for (int i = 0; i < values.size(); i++) {
-            fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
+            fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
         }
         return fields;
     }
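The two fragments builders above share one pattern: pull raw values out of _source and rebuild unstored Lucene fields under the single field name. A hedged sketch of that pattern, with values standing in for the result of sourceLookup.extractRawValues(...):

    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import java.util.List;

    final class SourceFieldsSketch {
        // Rebuild unstored text fields from raw _source values so the vector
        // highlighter can fragment them; after this change one name serves
        // both indexing and lookup.
        static Field[] toFields(String fieldName, List<Object> values) {
            Field[] fields = new Field[values.size()];
            for (int i = 0; i < values.size(); i++) {
                fields[i] = new Field(fieldName, values.get(i).toString(), TextField.TYPE_NOT_STORED);
            }
            return fields;
        }
    }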
diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
index 8190d5257d9..c3eef7559a0 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
@@ -699,17 +699,12 @@ public class DefaultSearchContext extends SearchContext {
     @Override
     public MappedFieldType smartNameFieldType(String name) {
-        return mapperService().smartNameFieldType(name, request.types());
-    }
-
-    @Override
-    public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
-        return mapperService().smartNameFieldType(name);
+        return mapperService().fullName(name);
     }
 
     @Override
     public ObjectMapper getObjectMapper(String name) {
-        return mapperService().getObjectMapper(name, request.types());
+        return mapperService().getObjectMapper(name);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index 1f04d013401..eaa14933b33 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -496,11 +496,6 @@ public abstract class FilteredSearchContext extends SearchContext {
         return in.smartNameFieldType(name);
     }
 
-    @Override
-    public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
-        return in.smartNameFieldTypeFromAnyType(name);
-    }
-
     @Override
     public ObjectMapper getObjectMapper(String name) {
         return in.getObjectMapper(name);
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 5c4e70c1004..76164b5c0f8 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -343,12 +343,10 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
         }
     }
 
-    public abstract MappedFieldType smartNameFieldType(String name);
-
     /**
      * Looks up the given field, but does not restrict to fields in the types set on this context.
     */
-    public abstract MappedFieldType smartNameFieldTypeFromAnyType(String name);
+    public abstract MappedFieldType smartNameFieldType(String name);
 
     public abstract ObjectMapper getObjectMapper(String name);
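After this consolidation a search context has exactly one field lookup, and it is no longer restricted to the request's types. A minimal sketch of the resulting shape (both interfaces are illustrative stand-ins for MapperService and SearchContext, not the real classes):

    interface MapperServiceSketch {
        // Resolves a field across all types by its full name; null if unmapped.
        Object fullName(String field);
    }

    abstract class SearchContextSketch {
        abstract MapperServiceSketch mapperService();

        // The surviving lookup: full-name resolution, no type filtering.
        public Object smartNameFieldType(String name) {
            return mapperService().fullName(name);
        }
    }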
diff --git a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java
index ec4b5a041ff..249a23b9bfc 100644
--- a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java
+++ b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java
@@ -85,7 +85,7 @@ public class FieldLookup {
         }
         valueLoaded = true;
         value = null;
-        List values = fields.get(fieldType.names().indexName());
+        List values = fields.get(fieldType.name());
         return values != null ? value = values.get(0) : null;
     }
 
@@ -95,6 +95,6 @@ public class FieldLookup {
         }
         valuesLoaded = true;
         values.clear();
-        return values = fields().get(fieldType.names().indexName());
+        return values = fields().get(fieldType.name());
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
index 3864581034e..db20a03f825 100644
--- a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
+++ b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
@@ -75,7 +75,7 @@ public class LeafDocLookup implements Map {
         String fieldName = key.toString();
         ScriptDocValues scriptValues = localCacheFieldData.get(fieldName);
         if (scriptValues == null) {
-            final MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types);
+            final MappedFieldType fieldType = mapperService.fullName(fieldName);
             if (fieldType == null) {
                 throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types) + "");
             }
@@ -99,7 +99,7 @@ public class LeafDocLookup implements Map {
         String fieldName = key.toString();
         ScriptDocValues scriptValues = localCacheFieldData.get(fieldName);
         if (scriptValues == null) {
-            MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types);
+            MappedFieldType fieldType = mapperService.fullName(fieldName);
             if (fieldType == null) {
                 return false;
             }
diff --git a/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java
index e5295e80fb3..a5f90aa2c90 100644
--- a/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java
+++ b/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java
@@ -136,7 +136,7 @@ public class LeafFieldsLookup implements Map {
     private FieldLookup loadFieldData(String name) {
         FieldLookup data = cachedFieldData.get(name);
         if (data == null) {
-            MappedFieldType fieldType = mapperService.smartNameFieldType(name, types);
+            MappedFieldType fieldType = mapperService.fullName(name);
             if (fieldType == null) {
                 throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types) + "");
             }
@@ -144,12 +144,12 @@ public class LeafFieldsLookup implements Map {
             cachedFieldData.put(name, data);
         }
         if (data.fields() == null) {
-            String fieldName = data.fieldType().names().indexName();
+            String fieldName = data.fieldType().name();
             fieldVisitor.reset(fieldName);
             try {
                 reader.document(docId, fieldVisitor);
                 fieldVisitor.postProcess(data.fieldType());
-                data.fields(singletonMap(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
+                data.fields(singletonMap(name, fieldVisitor.fields().get(data.fieldType().name())));
             } catch (IOException e) {
                 throw new ElasticsearchParseException("failed to load field [{}]", e, name);
             }
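The lookup classes above all follow the same fail-fast shape: resolve the field type by full name once, cache it, and reject unmapped fields immediately. A sketch under the assumption that the resolver behaves like MapperService.fullName (the surrounding types are simplified stand-ins):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    final class FieldLookupSketch {
        private final Map<String, Object> cache = new HashMap<>();
        private final Function<String, Object> fullNameResolver; // stand-in for mapperService::fullName

        FieldLookupSketch(Function<String, Object> fullNameResolver) {
            this.fullNameResolver = fullNameResolver;
        }

        Object load(String fieldName) {
            Object fieldType = cache.get(fieldName);
            if (fieldType == null) {
                fieldType = fullNameResolver.apply(fieldName);
                if (fieldType == null) {
                    // Mirrors the IllegalArgumentException thrown in the hunks above.
                    throw new IllegalArgumentException("No field found for [" + fieldName + "]");
                }
                cache.put(fieldName, fieldType);
            }
            return fieldType;
        }
    }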
diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
index 80a9daa53f0..a99158787d3 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
@@ -253,7 +253,7 @@ public class SortParseElement implements SearchParseElement {
                 IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldType)
                         .comparatorSource(missing, sortMode, nested);
-                sortFields.add(new SortField(fieldType.names().indexName(), fieldComparatorSource, reverse));
+                sortFields.add(new SortField(fieldType.name(), fieldComparatorSource, reverse));
             }
         }
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
index 99842ca77bb..a2e5f743c59 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
@@ -140,13 +140,13 @@ public class CompletionSuggestParser implements SuggestContextParser {
         final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService);
         TLP_PARSER.parse(parser, suggestion, contextAndSuggest);
         final XContentParser contextParser = contextAndSuggest.contextParser;
-        MappedFieldType mappedFieldType = mapperService.smartNameFieldType(suggestion.getField());
+        MappedFieldType mappedFieldType = mapperService.fullName(suggestion.getField());
         if (mappedFieldType == null) {
             throw new ElasticsearchException("Field [" + suggestion.getField() + "] is not a completion suggest field");
         } else if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
             CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
             if (type.hasContextMappings() == false && contextParser != null) {
-                throw new IllegalArgumentException("suggester [" + type.names().fullName() + "] doesn't expect any context");
+                throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
             }
             Map> queryContexts = Collections.emptyMap();
             if (type.hasContextMappings() && contextParser != null) {
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
index 75211e85205..527a35658c9 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
@@ -84,7 +84,7 @@ public class CompletionSuggester extends Suggester
                 final LeafReaderContext subReaderContext = leaves.get(readerIndex);
                 final int subDocId = suggestDoc.doc - subReaderContext.docBase;
                 for (String field : payloadFields) {
-                    MappedFieldType payloadFieldType = suggestionContext.getMapperService().smartNameFieldType(field);
+                    MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field);
                     if (payloadFieldType != null) {
                         final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType).load(subReaderContext);
                         final ScriptDocValues scriptValues = data.getScriptValues();
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
index 9b083a91788..0b904a95720 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
@@ -171,7 +171,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
             throw new IllegalArgumentException("The required field option is missing");
         }
 
-        MappedFieldType fieldType = mapperService.smartNameFieldType(suggestion.getField());
+        MappedFieldType fieldType = mapperService.fullName(suggestion.getField());
         if (fieldType == null) {
             throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]");
         } else if (suggestion.getAnalyzer() == null) {
@@ -329,7 +329,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
             if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
                 if ("field".equals(fieldName)) {
                     generator.setField(parser.text());
-                    if (mapperService.smartNameFieldType(generator.field()) == null) {
+                    if (mapperService.fullName(generator.field()) == null) {
                         throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
                     }
                 } else if ("size".equals(fieldName)) {
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
index b3485f3f212..13f7f74e37b 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
@@ -90,14 +90,12 @@ public class FieldDataCacheTests extends ESTestCase {
 
     private SortedSetDVOrdinalsIndexFieldData createSortedDV(String fieldName, IndexFieldDataCache indexFieldDataCache) {
         FieldDataType fieldDataType = new StringFieldMapper.StringFieldType().fieldDataType();
-        MappedFieldType.Names names = new MappedFieldType.Names(fieldName);
-        return new SortedSetDVOrdinalsIndexFieldData(createIndexSettings(), indexFieldDataCache, names, new NoneCircuitBreakerService(), fieldDataType);
+        return new SortedSetDVOrdinalsIndexFieldData(createIndexSettings(), indexFieldDataCache, fieldName, new NoneCircuitBreakerService(), fieldDataType);
     }
 
     private PagedBytesIndexFieldData createPagedBytes(String fieldName, IndexFieldDataCache indexFieldDataCache) {
         FieldDataType fieldDataType = new StringFieldMapper.StringFieldType().fieldDataType();
-        MappedFieldType.Names names = new MappedFieldType.Names(fieldName);
-        return new PagedBytesIndexFieldData(createIndexSettings(), names, fieldDataType, indexFieldDataCache, new NoneCircuitBreakerService());
+        return new PagedBytesIndexFieldData(createIndexSettings(), fieldName, fieldDataType, indexFieldDataCache, new NoneCircuitBreakerService());
     }
 
     private IndexSettings createIndexSettings() {
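As the test hunks show, field data is now keyed by a plain String rather than a MappedFieldType.Names wrapper. A sketch of what that removes (IndexFieldDataSketch is illustrative, not the Elasticsearch type):

    final class IndexFieldDataSketch {
        private final String fieldName; // was: MappedFieldType.Names names

        IndexFieldDataSketch(String fieldName) {
            this.fieldName = fieldName;
        }

        // was: getFieldNames().indexName() / getFieldNames().fullName()
        String getFieldName() {
            return fieldName;
        }
    }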
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
index c9ac901d44f..3d4f63daa33 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
@@ -41,7 +41,6 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperBuilders;
 import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
@@ -151,7 +150,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         final AtomicInteger onRemovalCalled = new AtomicInteger();
         ifdService.setListener(new IndexFieldDataCache.Listener() {
             @Override
-            public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+            public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
                 if (wrap) {
                     assertEquals(new ShardId("test", 1), shardId);
                 } else {
@@ -161,7 +160,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
             }
 
             @Override
-            public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+            public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
                 if (wrap) {
                     assertEquals(new ShardId("test", 1), shardId);
                 } else {
@@ -189,12 +188,12 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         try {
             shardPrivateService.setListener(new IndexFieldDataCache.Listener() {
                 @Override
-                public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+                public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
 
                 }
 
                 @Override
-                public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+                public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
 
                 }
             });
@@ -209,7 +208,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         try {
             IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool);
             IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null);
-            ft.setNames(new Names("some_long"));
+            ft.setName("some_long");
             ft.setHasDocValues(true);
             ifds.getForField(ft); // no exception
             ft.setHasDocValues(false);
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
index 561252a5c71..f1fb694b295 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
@@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.search.MultiValueMode;
 
 /** Returns an implementation based on paged bytes which doesn't implement WithOrdinals in order to visit different paths in the code,
@@ -39,8 +38,8 @@ public class NoOrdinalsStringFieldDataTests extends PagedBytesStringFieldDataTes
     }
 
     @Override
-    public Names getFieldNames() {
-        return in.getFieldNames();
+    public String getFieldName() {
+        return in.getFieldName();
     }
 
     @Override
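The listener hunks above carry the same simplification into the cache callbacks. A sketch of the post-change listener shape, with the String parameter mirrored from the test diff and the remaining parameter types reduced to stand-ins (they are not the real ShardId, FieldDataType, or Accountable):

    interface FieldDataCacheListenerSketch {
        // shardId, fieldDataType and ramUsage stand in for the real types;
        // the interesting change is fieldName, formerly MappedFieldType.Names.
        void onCache(Object shardId, String fieldName, Object fieldDataType, long ramUsage);

        void onRemoval(Object shardId, String fieldName, Object fieldDataType, boolean wasEvicted, long sizeInBytes);
    }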
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java
index 90976dac4b1..06307614a51 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java
@@ -109,14 +109,14 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
 
     public void testAnalyzers() throws IOException {
         FakeFieldType fieldType1 = new FakeFieldType();
-        fieldType1.setNames(new MappedFieldType.Names("field1"));
+        fieldType1.setName("field1");
         fieldType1.setIndexAnalyzer(new NamedAnalyzer("foo", new FakeAnalyzer("index")));
         fieldType1.setSearchAnalyzer(new NamedAnalyzer("bar", new FakeAnalyzer("search")));
         fieldType1.setSearchQuoteAnalyzer(new NamedAnalyzer("baz", new FakeAnalyzer("search_quote")));
         FieldMapper fieldMapper1 = new FakeFieldMapper("field1", fieldType1);
 
         FakeFieldType fieldType2 = new FakeFieldType();
-        fieldType2.setNames(new MappedFieldType.Names("field2"));
+        fieldType2.setName("field2");
         FieldMapper fieldMapper2 = new FakeFieldMapper("field2", fieldType2);
 
         Analyzer defaultIndex = new FakeAnalyzer("default_index");
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
index 8452c836041..c5dbd653bfe 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
@@ -38,15 +38,10 @@ public class FieldTypeLookupTests extends ESTestCase {
     public void testEmpty() {
         FieldTypeLookup lookup = new FieldTypeLookup();
         assertNull(lookup.get("foo"));
-        assertNull(lookup.getByIndexName("foo"));
         assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
-        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
         Collection names = lookup.simpleMatchToFullName("foo");
         assertNotNull(names);
         assertTrue(names.isEmpty());
-        names = lookup.simpleMatchToIndexNames("foo");
-        assertNotNull(names);
-        assertTrue(names.isEmpty());
         Iterator itr = lookup.iterator();
         assertNotNull(itr);
         assertFalse(itr.hasNext());
@@ -64,55 +59,45 @@ public class FieldTypeLookupTests extends ESTestCase {
 
     public void testAddNewField() {
         FieldTypeLookup lookup = new FieldTypeLookup();
-        FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
+        FakeFieldMapper f = new FakeFieldMapper("foo");
         FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean());
         assertNull(lookup.get("foo"));
         assertNull(lookup.get("bar"));
-        assertNull(lookup.getByIndexName("foo"));
-        assertNull(lookup.getByIndexName("bar"));
         assertEquals(f.fieldType(), lookup2.get("foo"));
         assertNull(lookup.get("bar"));
-        assertEquals(f.fieldType(), lookup2.getByIndexName("bar"));
-        assertNull(lookup.getByIndexName("foo"));
         assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
-        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
         assertEquals(Collections.emptySet(), lookup.getTypes("bar"));
-        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("bar"));
         assertEquals(Collections.singleton("type"), lookup2.getTypes("foo"));
-        assertEquals(Collections.emptySet(), lookup2.getTypesByIndexName("foo"));
         assertEquals(Collections.emptySet(), lookup2.getTypes("bar"));
-        assertEquals(Collections.singleton("type"), lookup2.getTypesByIndexName("bar"));
         assertEquals(1, size(lookup2.iterator()));
     }
 
     public void testAddExistingField() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
         FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
 
         assertSame(f2.fieldType(), lookup2.get("foo"));
-        assertSame(f2.fieldType(), lookup2.getByIndexName("foo"));
         assertEquals(1, size(lookup2.iterator()));
     }
 
     public void testAddExistingIndexName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo");
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
         FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
 
         assertSame(f.fieldType(), lookup2.get("foo"));
         assertSame(f2.fieldType(), lookup2.get("bar"));
-        assertSame(f2.fieldType(), lookup2.getByIndexName("foo"));
         assertEquals(2, size(lookup2.iterator()));
     }
 
     public void testAddExistingFullName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
         try {
             lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@@ -121,33 +106,12 @@ public class FieldTypeLookupTests extends ESTestCase {
         }
     }
 
-    public void testAddExistingBridgeName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f, f2), randomBoolean());
-
-        try {
-            FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar");
-            lookup.copyAndAddAll("type2", newList(f3), randomBoolean());
-        } catch (IllegalStateException e) {
-            assertTrue(e.getMessage().contains("insane mappings"));
-        }
-
-        try {
-            FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo");
-            lookup.copyAndAddAll("type2", newList(f3), randomBoolean());
-        } catch (IllegalStateException e) {
-            assertTrue(e.getMessage().contains("insane mappings"));
-        }
-    }
-
     public void testCheckCompatibilityMismatchedTypes() {
-        FieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
-        MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo");
+        MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo");
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
             lookup.copyAndAddAll("type2", newList(f2), false);
@@ -165,11 +129,11 @@ public class FieldTypeLookupTests extends ESTestCase {
     }
 
     public void testCheckCompatibilityConflict() {
-        FieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
-        MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar");
+        MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo");
         ft2.setBoost(2.0f);
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
@@ -182,7 +146,7 @@ public class FieldTypeLookupTests extends ESTestCase {
         lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
         lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing
         // now with a non changeable setting
-        MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar");
+        MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo");
         ft3.setStored(true);
         FieldMapper f3 = new FakeFieldMapper("foo", ft3);
         try {
@@ -200,28 +164,18 @@ public class FieldTypeLookupTests extends ESTestCase {
         }
     }
 
-    public void testSimpleMatchIndexNames() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
-        Collection names = lookup.simpleMatchToIndexNames("b*");
-        assertTrue(names.contains("baz"));
-        assertTrue(names.contains("boo"));
-    }
-
     public void testSimpleMatchFullNames() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
+        FakeFieldMapper f1 = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
         Collection names = lookup.simpleMatchToFullName("b*");
-        assertTrue(names.contains("foo"));
+        assertFalse(names.contains("foo"));
         assertTrue(names.contains("bar"));
     }
 
     public void testIteratorImmutable() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FakeFieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
@@ -243,20 +197,20 @@ public class FieldTypeLookupTests extends ESTestCase {
     // this sucks how much must be overridden just do get a dummy field mapper...
     static class FakeFieldMapper extends FieldMapper {
         static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
-        public FakeFieldMapper(String fullName, String indexName) {
-            super(fullName, makeFieldType(fullName, indexName), makeFieldType(fullName, indexName), dummySettings, null, null);
+        public FakeFieldMapper(String fullName) {
+            super(fullName, makeFieldType(fullName), makeFieldType(fullName), dummySettings, null, null);
         }
         public FakeFieldMapper(String fullName, MappedFieldType fieldType) {
             super(fullName, fieldType, fieldType, dummySettings, null, null);
         }
-        static MappedFieldType makeFieldType(String fullName, String indexName) {
+        static MappedFieldType makeFieldType(String fullName) {
             FakeFieldType fieldType = new FakeFieldType();
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName));
+            fieldType.setName(fullName);
            return fieldType;
         }
-        static MappedFieldType makeOtherFieldType(String fullName, String indexName) {
+        static MappedFieldType makeOtherFieldType(String fullName) {
             OtherFakeFieldType fieldType = new OtherFakeFieldType();
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName));
+            fieldType.setName(fullName);
             return fieldType;
         }
         static class FakeFieldType extends MappedFieldType {
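The adjusted testSimpleMatchFullNames expectation is the behavioral heart of this change: with index names gone, a wildcard matches full names only, so a field registered as "foo" can no longer leak into a "b*" match through an index-name alias. A runnable sketch of that semantics (LookupSketch is illustrative, not the real FieldTypeLookup):

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;
    import java.util.regex.Pattern;

    final class LookupSketch {
        private final Set<String> fullNames = new HashSet<>();

        void add(String fullName) { fullNames.add(fullName); }

        // Simple-match a pattern like "b*" against full names only.
        List<String> simpleMatchToFullName(String pattern) {
            Pattern p = Pattern.compile(pattern.replace("*", ".*"));
            List<String> matches = new ArrayList<>();
            for (String name : fullNames) {
                if (p.matcher(name).matches()) {
                    matches.add(name);
                }
            }
            return matches;
        }

        public static void main(String[] args) {
            LookupSketch lookup = new LookupSketch();
            lookup.add("foo");
            lookup.add("bar");
            System.out.println(lookup.simpleMatchToFullName("b*")); // [bar]; "foo" cannot match anymore
        }
    }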
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
index ca0cbf194d6..c8d7e4ac147 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
@@ -173,7 +173,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
 
     MappedFieldType createNamedDefaultFieldType() {
         MappedFieldType fieldType = createDefaultFieldType();
-        fieldType.setNames(new MappedFieldType.Names("foo"));
+        fieldType.setName("foo");
         return fieldType;
     }
 
@@ -213,7 +213,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
 
     protected String toString(MappedFieldType ft) {
         return "MappedFieldType{" +
-            "names=" + ft.names() +
+            "name=" + ft.name() +
             ", boost=" + ft.boost() +
             ", docValues=" + ft.hasDocValues() +
             ", indexAnalyzer=" + ft.indexAnalyzer() +
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
index 5422d6c2f7d..6de07d8cd0f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
@@ -162,7 +162,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .field("completion", "suggestion")
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 1);
     }
 
@@ -181,7 +181,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .array("completion", "suggestion1", "suggestion2")
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 2);
     }
 
@@ -203,7 +203,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 1);
     }
 
@@ -225,7 +225,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 3);
     }
 
@@ -257,7 +257,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 3);
     }
 
@@ -289,7 +289,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 6);
     }
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
index 6ff0a428f79..9223b640024 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java
@@ -45,7 +45,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
 
     private static MappedFieldType FIELD_TYPE = new BooleanFieldMapper.BooleanFieldType();
     static {
-        FIELD_TYPE.setNames(new MappedFieldType.Names(FIELD_NAME));
+        FIELD_TYPE.setName(FIELD_NAME);
         FIELD_TYPE.freeze();
     }
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java
index e05bc21ba89..c15ebae636d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java
@@ -244,7 +244,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
         static {
             FIELD_TYPE.setTokenized(false);
             FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
-            FIELD_TYPE.setNames(new MappedFieldType.Names("_dummy"));
+            FIELD_TYPE.setName("_dummy");
             FIELD_TYPE.freeze();
         }
 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
index 879c6590751..0d52b66dfb6 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
@@ -42,12 +42,12 @@ public class ParentFieldMapperTests extends ESTestCase {
 
         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));
 
-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
 
-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
@@ -60,12 +60,12 @@ public class ParentFieldMapperTests extends ESTestCase {
 
         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));
 
-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
 
-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
@@ -78,12 +78,12 @@ public class ParentFieldMapperTests extends ESTestCase {
 
         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));
 
-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
 
-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
index 230af8d3a50..ed9792fb44e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java
@@ -55,7 +55,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
         doc = docMapper.parse("test", "person", "1", json).rootDoc();
     }
 
@@ -68,8 +68,8 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
         DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
+        assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
 
     public void testSimpleParser() throws Exception {
@@ -80,8 +80,8 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
+        assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
 
     public void testSimpleParserNoTypeNoId() throws Exception {
@@ -89,8 +89,8 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
         Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
-        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
+        assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
+        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
 
     public void testAttributes() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
index c0ffcfde361..f46f2982091 100644
--- a/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
@@ -46,7 +46,7 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase {
     @Override
-    public MappedFieldType.Names getFieldNames() {
-        return new MappedFieldType.Names("test");
+    public String getFieldName() {
+        return "test";
     }
 
     @Override
@@ -171,8 +170,8 @@ public class FunctionScoreTests extends ESTestCase {
     }
 
     @Override
-    public MappedFieldType.Names getFieldNames() {
-        return new MappedFieldType.Names("test");
+    public String getFieldName() {
+        return "test";
     }
 
     @Override
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index d749ba56594..64ec036ea1a 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -984,7 +984,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         IndexShard newShard = reinitWithWrapper(indexService, shard, wrapper);
         try {
             // test global ordinals are evicted
-            MappedFieldType foo = newShard.mapperService().indexName("foo");
+            MappedFieldType foo = newShard.mapperService().fullName("foo");
             IndexFieldData.Global ifd = shard.indexFieldDataService().getForField(foo);
             FieldDataStats before = shard.fieldData().stats("foo");
             assertThat(before.getMemorySizeInBytes(), equalTo(0l));
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
index 1dc7d20c7b7..0d27ba04a91 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -80,7 +80,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 7);
     }
 
@@ -113,7 +113,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
@@ -144,7 +144,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
@@ -182,7 +182,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
index 9e5dd86fea6..b42af82433b 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
@@ -80,7 +80,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 7);
     }
 
@@ -117,7 +117,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
@@ -156,7 +156,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
                 .field("weight", 5)
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
@@ -194,7 +194,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
 
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
index a92e8d9dd38..c50aa4da289 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
@@ -67,6 +67,6 @@ public class CountMethodValueSource extends ValueSource {
 
     @Override
     public String description() {
-        return "count: field(" + fieldData.getFieldNames().toString() + ")";
+        return "count: field(" + fieldData.getFieldName() + ")";
     }
 }
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
index 8bbf6251fc3..9efeed54ff9 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java
@@ -54,7 +54,7 @@ class DateMethodValueSource extends FieldDataValueSource {
 
     @Override
     public String description() {
-        return methodName + ": field(" + fieldData.getFieldNames().toString() + ")";
+        return methodName + ": field(" + fieldData.getFieldName() + ")";
     }
 
     @Override
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
index cf6017a32ca..192f69884e8 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
@@ -184,7 +184,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
                         throw new ScriptException("Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()");
                     }
 
-                    MappedFieldType fieldType = mapper.smartNameFieldType(fieldname);
+                    MappedFieldType fieldType = mapper.fullName(fieldname);
 
                     if (fieldType == null) {
                         throw new ScriptException("Field [" + fieldname + "] used in expression does not exist in mappings");
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java
index 39386ee4913..708cd0af152 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java
@@ -75,6 +75,6 @@ class FieldDataValueSource extends ValueSource {
 
     @Override
     public String description() {
-        return "field(" + fieldData.getFieldNames().toString() + ")";
+        return "field(" + fieldData.getFieldName() + ")";
     }
 }
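In the expression module the same rename reaches the ValueSource descriptions, which now print the plain field name instead of Names.toString(). A small sketch of the resulting behavior (FieldDataLike is a stand-in for the field-data accessor):

    interface FieldDataLike {
        String getFieldName();
    }

    final class DescriptionSketch {
        // Mirrors the description() bodies in the hunks above.
        static String count(FieldDataLike fieldData) {
            return "count: field(" + fieldData.getFieldName() + ")";
        }

        static String field(FieldDataLike fieldData) {
            return "field(" + fieldData.getFieldName() + ")";
        }
    }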
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java
index 37132c5a923..98a23b3e1fd 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java
@@ -77,7 +77,7 @@ public class GeoShapeIntegrationTests extends ESIntegTestCase {
         // left orientation test
         IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName));
         IndexService indexService = indicesService.indexService(idxName);
-        MappedFieldType fieldType = indexService.mapperService().smartNameFieldType("location");
+        MappedFieldType fieldType = indexService.mapperService().fullName("location");
         assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class));
 
         GeoShapeFieldMapper.GeoShapeFieldType gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType;
@@ -89,7 +89,7 @@ public class GeoShapeIntegrationTests extends ESIntegTestCase {
         // right orientation test
         indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2"));
         indexService = indicesService.indexService(idxName+"2");
-        fieldType = indexService.mapperService().smartNameFieldType("location");
+        fieldType = indexService.mapperService().fullName("location");
         assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class));
 
         gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType;
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
index f550193b847..21627daeb53 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
@@ -61,19 +61,19 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase {
             .endObject().bytes();
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content").fieldType().names().indexName()), containsString("World"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), equalTo("Hello"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), equalTo("kimchy"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), startsWith("text/html;"));
-        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content").fieldType().name()), containsString("World"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().name()), equalTo("Hello"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().name()), equalTo("kimchy"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().name()), startsWith("text/html;"));
+        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L));
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), nullValue());
-        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().name()), nullValue());
+        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().name()), nullValue());
     }
 
     public void testMultipleDocsEncryptedFirst() throws IOException {
@@ -90,19 +90,19 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase {
             .endObject().bytes();
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file1").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), nullValue());
-        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().name()), nullValue());
+        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().name()), nullValue());
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content").fieldType().names().indexName()), containsString("World"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), equalTo("Hello"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), equalTo("kimchy"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), startsWith("text/html;"));
-        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content").fieldType().name()), containsString("World"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().name()), equalTo("Hello"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().name()), equalTo("kimchy"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().name()), startsWith("text/html;"));
+        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L));
     }
 
     public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException {
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java
index 190231eb95e..5d81df7a7fe 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java
@@ -77,7 +77,7 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa
         ParseContext.Document doc =  docMapper.parse("person", "person", "1", xcb.bytes()).rootDoc();
 
         // Our mapping should be kept as a String
-        assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().names().indexName()), equalTo(expected));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().name()), equalTo(expected));
     }
 
     public void testFrDetection() throws Exception {
@@ -122,6 +122,6 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa
         ParseContext.Document doc =  docMapper.parse("person", "person", "1", xcb.bytes()).rootDoc();
 
         // Our mapping should be kept as a String
-        assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().names().indexName()), equalTo("en"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().name()), equalTo("en"));
     }
 }
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java
index 0a0f69edcfd..b44a6d55eb9 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java
@@ -65,21 +65,21 @@ public class MetadataMapperTests extends AttachmentUnitTestCase {
               .endObject().bytes();
         ParseContext.Document doc =  docMapper.parse("person", "person", "1", json).rootDoc();
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("World"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file.name").fieldType().names().indexName()), equalTo(filename));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("World"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.name").fieldType().name()), equalTo(filename));
         if (expectedDate == null) {
-            assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().names().indexName()), nullValue());
+            assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().name()), nullValue());
         } else {
-            assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().names().indexName()).numericValue().longValue(), is(expectedDate));
+            assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().name()).numericValue().longValue(), is(expectedDate));
         }
-        assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("Hello"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file.author").fieldType().names().indexName()), equalTo("kimchy"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("text/html;"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("Hello"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.author").fieldType().name()), equalTo("kimchy"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("text/html;"));
         if (expectedLength == null) {
-            assertNull(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue());
+            assertNull(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().name()).numericValue().longValue());
         } else {
-            assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L));
+            assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L));
         }
     }
 
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java
index 7618c4de5a1..fd5f480700c 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.mapper.attachments;
 
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
@@ -52,9 +50,9 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {
         BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes();
 
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document"));
-
assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); // re-parse it String builtMapping = docMapper.mappingSource().string(); @@ -64,23 +62,9 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); - } - - public void testContentBackcompat() throws Exception { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(), - getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); - byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); - - BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); - - ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get("file"), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); } /** @@ -95,9 +79,9 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), 
startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); // re-parse it String builtMapping = docMapper.mappingSource().string(); @@ -107,9 +91,9 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); } /** diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java index 137980c6b36..217d48a8565 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java @@ -135,7 +135,7 @@ public class StandaloneRunner extends CliTool { } private void printMetadataContent(ParseContext.Document doc, String field) { - terminal.println("- %s: %s", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().names().indexName())); + terminal.println("- %s: %s", field, doc.get(docMapper.mappers().getMapper("file." 
+ field).fieldType().name())); } public static byte[] copyToBytes(Path path) throws IOException { diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java index 94beb54d281..9475c85a5f4 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java @@ -156,8 +156,8 @@ public class VariousDocTests extends AttachmentUnitTestCase { ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); if (!errorExpected) { - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), not(isEmptyOrNullString())); - logger.debug("-> extracted content: {}", doc.get(docMapper.mappers().getMapper("file").fieldType().names().indexName())); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), not(isEmptyOrNullString())); + logger.debug("-> extracted content: {}", doc.get(docMapper.mappers().getMapper("file").fieldType().name())); logger.debug("-> extracted metadata:"); printMetadataContent(doc, AUTHOR); printMetadataContent(doc, CONTENT_LENGTH); @@ -171,6 +171,6 @@ public class VariousDocTests extends AttachmentUnitTestCase { } private void printMetadataContent(ParseContext.Document doc, String field) { - logger.debug("- [{}]: [{}]", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().names().indexName())); + logger.debug("- [{}]: [{}]", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().name())); } } diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 1e27e18bac7..3fa9f1ffe1e 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -54,7 +54,7 @@ public class SizeFieldMapper extends MetadataFieldMapper { static { SIZE_FIELD_TYPE.setStored(true); SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT); - SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + SIZE_FIELD_TYPE.setName(NAME); SIZE_FIELD_TYPE.setIndexAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_32_BIT)); SIZE_FIELD_TYPE.setSearchAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE)); SIZE_FIELD_TYPE.freeze(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 7fec7e8e4ac..796872bd350 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -548,18 +548,11 @@ public class TestSearchContext extends SearchContext { return null; } + @Override public MappedFieldType smartNameFieldType(String name) { if (mapperService() != null) { - return mapperService().smartNameFieldType(name, types()); - } - return null; - } - - @Override - public MappedFieldType smartNameFieldTypeFromAnyType(String name) { - if (mapperService() != null) { - return mapperService().smartNameFieldType(name); + return mapperService().fullName(name); } 
return null; } @@ -567,7 +560,7 @@ public class TestSearchContext extends SearchContext { @Override public ObjectMapper getObjectMapper(String name) { if (mapperService() != null) { - return mapperService().getObjectMapper(name, types); + return mapperService().getObjectMapper(name); } return null; } From f40b72633b0a2c91852aabc1688b4e4125c97c81 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 23 Dec 2015 17:45:46 +0100 Subject: [PATCH 239/322] Add sub-fields support to `bool` fields. `bool` is our only core mapper that does not support sub fields. Close #6587 --- .../index/mapper/core/BooleanFieldMapper.java | 3 +++ .../mapper/core/BooleanFieldMapperTests.java | 24 +++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index d5440f227f4..76f8eb34a71 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -43,6 +43,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.booleanField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; /** * A field mapper for boolean fields. @@ -107,6 +108,8 @@ public class BooleanFieldMapper extends FieldMapper { } builder.nullValue(nodeBooleanValue(propNode)); iterator.remove(); + } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { + iterator.remove(); } } return builder; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 2bf97a4885c..3aa04ba0f01 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -110,4 +111,27 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { builder.endObject(); assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string()); } + + public void testMultiFields() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "boolean") + .startObject("fields") + .startObject("as_string") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + assertEquals(mapping, mapper.mappingSource().toString()); + BytesReference source = XContentFactory.jsonBuilder() + .startObject() + .field("field", false) + 
.endObject().bytes(); + ParsedDocument doc = mapper.parse("test", "type", "1", source); + assertNotNull(doc.rootDoc().getField("field.as_string")); + } } From 6ab922c5a0d9b1ced8032c21c2446031dc7d9c89 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 23 Dec 2015 16:07:13 -0500 Subject: [PATCH 240/322] Reduce runtime of CidrsTests#testValidCombinations This commit reduces the running time of CidrsTests#testValidCombinations by hoisting some costly repeated operations outside of the inner test loop. --- .../test/java/org/elasticsearch/common/network/CidrsTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java index 7109b2db02c..0b00353f98a 100644 --- a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java @@ -133,8 +133,9 @@ public class CidrsTests extends ESTestCase { public void testValidCombinations() { for (long i = 0; i < (1 << 16); i++) { + String octetsString = Cidrs.octetsToString(Cidrs.longToOctets(i << 16)); for (int mask = 16; mask <= 32; mask++) { - String test = Cidrs.octetsToCIDR(Cidrs.longToOctets(i << 16), mask); + String test = octetsString + "/" + mask; long[] actual = Cidrs.cidrMaskToMinMax(test); assertNotNull(test, actual); assertEquals(test, 2, actual.length); From f14a21639c182f59ef7d9c4c2a0e29e807e5ff43 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 23 Dec 2015 18:15:33 -0500 Subject: [PATCH 241/322] add cleanups from simon --- .../repositories/hdfs/HdfsRepository.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index e0fe49498d6..1e8e267bd41 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -55,9 +55,13 @@ public final class HdfsRepository extends BlobStoreRepository { private final RepositorySettings repositorySettings; private final ByteSizeValue chunkSize; private final boolean compress; - + private HdfsBlobStore blobStore; + // buffer size passed to HDFS read/write methods + // TODO: why 100KB? 
+    private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB);
+
     @Inject
     public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
@@ -70,11 +74,11 @@ public final class HdfsRepository extends BlobStoreRepository {
     @Override
     protected void doStart() {
         String uriSetting = repositorySettings.settings().get("uri");
-        if (!Strings.hasText(uriSetting)) {
+        if (Strings.hasText(uriSetting) == false) {
             throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore");
         }
         URI uri = URI.create(uriSetting);
-        if (!"hdfs".equalsIgnoreCase(uri.getScheme())) {
+        if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) {
             throw new IllegalArgumentException(
                     String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting));
         }
@@ -89,7 +93,7 @@ public final class HdfsRepository extends BlobStoreRepository {
             throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore");
         }

-        int bufferSize = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
+        int bufferSize = repositorySettings.settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt();

         try {
             // initialize our filecontext

From 3015eb3088c93b6833e2ec19a866fc1904b04eaf Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Wed, 23 Dec 2015 16:58:02 +0100
Subject: [PATCH 242/322] Improve cross-type dynamic mapping updates.

Today when dynamically mapping a field that is already defined in another
type, we use the regular dynamic mapping logic and try to copy some settings
to avoid introducing conflicts. However, this is quite fragile, as we don't
deal with every existing setting. This commit proposes a different approach
that simply reuses the shared field type.
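As an illustration of the idea, the core of the new logic can be sketched as
a small standalone helper. This is a simplified rendering of the
DocumentParser change in the diff below, not code from this patch; the
DynamicFieldReuse class name is invented for the example:

    import java.util.Collections;

    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.Mapper;

    final class DynamicFieldReuse {
        /**
         * Builds a dynamic mapper, then re-points it at a field type that is
         * already mapped under the same full path in another type, if any.
         */
        static Mapper buildReusing(Mapper.Builder builder, Mapper.BuilderContext context,
                                   String path, MappedFieldType existingFieldType) {
            Mapper mapper = builder.build(context);
            if (existingFieldType != null) {
                // Swap in the shared field type instead of copying its settings
                // one by one, which was the old, fragile approach.
                mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType));
            }
            return mapper;
        }
    }

The design choice here: rather than enumerating which settings to copy, the
new mapper is pointed at the one shared MappedFieldType instance, so the two
types cannot silently drift apart.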
Close #15568 --- .../index/mapper/DocumentParser.java | 30 ++------ .../index/mapper/FieldMapper.java | 2 + .../index/mapper/DynamicMappingTests.java | 71 ++++++++++++++++--- 3 files changed, 68 insertions(+), 35 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 4eb3100c99c..dd89ad37b44 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -596,40 +596,22 @@ class DocumentParser implements Closeable { if (dynamic == ObjectMapper.Dynamic.FALSE) { return null; } + final String path = context.path().pathAsText(currentFieldName); final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); - final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().pathAsText(currentFieldName)); + final MappedFieldType existingFieldType = context.mapperService().fullName(path); Mapper.Builder builder = null; if (existingFieldType != null) { // create a builder of the same type builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName); - if (builder != null) { - // best-effort to not introduce a conflict - if (builder instanceof StringFieldMapper.Builder) { - StringFieldMapper.Builder stringBuilder = (StringFieldMapper.Builder) builder; - stringBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings()); - stringBuilder.store(existingFieldType.stored()); - stringBuilder.indexOptions(existingFieldType.indexOptions()); - stringBuilder.tokenized(existingFieldType.tokenized()); - stringBuilder.omitNorms(existingFieldType.omitNorms()); - stringBuilder.docValues(existingFieldType.hasDocValues()); - stringBuilder.indexAnalyzer(existingFieldType.indexAnalyzer()); - stringBuilder.searchAnalyzer(existingFieldType.searchAnalyzer()); - } else if (builder instanceof NumberFieldMapper.Builder) { - NumberFieldMapper.Builder numberBuilder = (NumberFieldMapper.Builder) builder; - numberBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings()); - numberBuilder.store(existingFieldType.stored()); - numberBuilder.indexOptions(existingFieldType.indexOptions()); - numberBuilder.tokenized(existingFieldType.tokenized()); - numberBuilder.omitNorms(existingFieldType.omitNorms()); - numberBuilder.docValues(existingFieldType.hasDocValues()); - numberBuilder.precisionStep(existingFieldType.numericPrecisionStep()); - } - } } if (builder == null) { builder = createBuilderFromDynamicValue(context, token, currentFieldName); } Mapper mapper = builder.build(builderContext); + if (existingFieldType != null) { + // try to not introduce a conflict + mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType)); + } mapper = parseAndMergeUpdate(mapper, context); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 3ab0ec86303..9bf58f6107f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -363,6 +363,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name()); if (newFieldType == null) { throw new IllegalStateException(); + } else if (fieldType.getClass() != 
newFieldType.getClass()) { + throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass()); } MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType); if (fieldType == newFieldType && multiFields == updatedMultiFields) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index d38e458248a..96e5a2fe80e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -30,8 +31,13 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; +import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.FloatFieldMapper; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -367,17 +373,52 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { } public void testReuseExistingMappings() throws IOException, Exception { - IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=string,store=yes", "my_field2", "type=integer,precision_step=10"); + IndexService indexService = createIndex("test", Settings.EMPTY, "type", + "my_field1", "type=string,store=yes", + "my_field2", "type=integer,precision_step=10", + "my_field3", "type=long,doc_values=false", + "my_field4", "type=float,index_options=freqs", + "my_field5", "type=double,precision_step=14", + "my_field6", "type=date,doc_values=false"); // Even if the dynamic type of our new field is long, we already have a mapping for the same field // of type string so it should be mapped as a string DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper(); Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(), - XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject()); + XContentFactory.jsonBuilder().startObject() + .field("my_field1", 42) + .field("my_field2", 43) + .field("my_field3", 44) + .field("my_field4", 45) + .field("my_field5", 46) + .field("my_field6", 47) + .endObject()); Mapper myField1Mapper = null; + Mapper myField2Mapper = null; + Mapper myField3Mapper = null; + Mapper myField4Mapper = null; + Mapper myField5Mapper = null; + Mapper myField6Mapper = null; for (Mapper m : update) { - if (m.name().equals("my_field1")) { + switch (m.name()) { + case "my_field1": myField1Mapper = m; + break; + case "my_field2": + myField2Mapper = m; + break; + case "my_field3": + myField3Mapper = m; + break; + case "my_field4": + myField4Mapper = m; + 
break;
+                case "my_field5":
+                    myField5Mapper = m;
+                    break;
+                case "my_field6":
+                    myField6Mapper = m;
+                    break;
             }
         }
         assertNotNull(myField1Mapper);
@@ -388,20 +429,28 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {

         // Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
         // since we already have a mapping of type integer
-        update = parse(newMapper, indexService.mapperService().documentMapperParser(),
-                XContentFactory.jsonBuilder().startObject().field("my_field2", 42).endObject());
-        Mapper myField2Mapper = null;
-        for (Mapper m : update) {
-            if (m.name().equals("my_field2")) {
-                myField2Mapper = m;
-            }
-        }
         assertNotNull(myField2Mapper);
         // same type
         assertTrue(myField2Mapper instanceof IntegerFieldMapper);
         // and same option
         assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep());

+        assertNotNull(myField3Mapper);
+        assertTrue(myField3Mapper instanceof LongFieldMapper);
+        assertFalse(((LongFieldType) ((LongFieldMapper) myField3Mapper).fieldType()).hasDocValues());
+
+        assertNotNull(myField4Mapper);
+        assertTrue(myField4Mapper instanceof FloatFieldMapper);
+        assertEquals(IndexOptions.DOCS_AND_FREQS, ((FieldMapper) myField4Mapper).fieldType().indexOptions());
+
+        assertNotNull(myField5Mapper);
+        assertTrue(myField5Mapper instanceof DoubleFieldMapper);
+        assertEquals(14, ((DoubleFieldMapper) myField5Mapper).fieldType().numericPrecisionStep());
+
+        assertNotNull(myField6Mapper);
+        assertTrue(myField6Mapper instanceof DateFieldMapper);
+        assertFalse(((DateFieldType) ((DateFieldMapper) myField6Mapper).fieldType()).hasDocValues());
+
         // This can't work
         try {
             parse(newMapper, indexService.mapperService().documentMapperParser(),

From af122f4151a95eb17b91ab19df8a08b98419669b Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Thu, 24 Dec 2015 13:47:37 +0100
Subject: [PATCH 243/322] Remove mapping backward compatibility with pre-2.0.

This removes the backward compatibility layer with pre-2.0 indices, notably
the extraction of _id, _routing or _timestamp from the source document when a
path is defined.
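Concretely, a pre-2.0 mapping such as "_routing": { "required": true, "path": "user.id" }
no longer causes the routing value to be extracted while parsing the source;
the value now has to travel on the request itself. A minimal sketch using the
Java client (index, type, field and value names are invented for the example):

    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    import org.elasticsearch.action.index.IndexResponse;
    import org.elasticsearch.client.Client;

    final class ExplicitRoutingExample {
        // The routing key is supplied on the request; after this change it is
        // no longer pulled out of the "user.id" field of the document source.
        static IndexResponse indexWithRouting(Client client) throws Exception {
            return client.prepareIndex("orders", "order", "1")
                    .setRouting("user-42")
                    .setSource(jsonBuilder()
                            .startObject()
                                .startObject("user").field("id", "user-42").endObject()
                            .endObject())
                    .get();
        }
    }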
--- .../action/index/IndexRequest.java | 37 -- .../cluster/metadata/MappingMetaData.java | 387 +-------------- .../index/analysis/AnalysisService.java | 2 +- .../analysis/CustomAnalyzerProvider.java | 2 +- .../index/mapper/DocumentMapperParser.java | 6 +- .../index/mapper/DocumentParser.java | 9 +- .../index/mapper/FieldMapper.java | 8 +- .../index/mapper/MapperService.java | 18 +- .../elasticsearch/index/mapper/Mapping.java | 10 - .../index/mapper/core/BinaryFieldMapper.java | 14 - .../index/mapper/core/DateFieldMapper.java | 13 +- .../index/mapper/core/StringFieldMapper.java | 14 +- .../index/mapper/core/TypeParsers.java | 18 +- .../mapper/geo/GeoPointFieldMapperLegacy.java | 21 +- .../index/mapper/geo/GeoShapeFieldMapper.java | 10 +- .../index/mapper/internal/AllFieldMapper.java | 3 - .../internal/FieldNamesFieldMapper.java | 24 - .../index/mapper/internal/IdFieldMapper.java | 81 +--- .../mapper/internal/IndexFieldMapper.java | 31 +- .../mapper/internal/ParentFieldMapper.java | 3 - .../mapper/internal/RoutingFieldMapper.java | 42 +- .../mapper/internal/TimestampFieldMapper.java | 110 +---- .../mapper/internal/TypeFieldMapper.java | 28 +- .../index/mapper/internal/UidFieldMapper.java | 25 +- .../mapper/internal/VersionFieldMapper.java | 14 +- .../index/mapper/object/RootObjectMapper.java | 8 - .../metadata/MappingMetaDataParserTests.java | 340 ------------- .../org/elasticsearch/get/GetActionIT.java | 76 --- .../index/mapper/MapperServiceTests.java | 22 - .../mapper/all/SimpleAllMapperTests.java | 13 - .../date/DateBackwardsCompatibilityTests.java | 207 -------- .../mapper/date/SimpleDateMappingTests.java | 98 ---- .../mapper/geo/GeoPointFieldMapperTests.java | 54 --- .../mapper/geo/GeoShapeFieldMapperTests.java | 2 +- .../index/mapper/id/IdMappingTests.java | 51 -- .../mapper/index/IndexTypeMapperTests.java | 80 ---- .../internal/FieldNamesFieldMapperTests.java | 41 -- .../mapper/parent/ParentMappingTests.java | 21 - .../routing/RoutingTypeMapperTests.java | 82 ---- .../string/SimpleStringMappingTests.java | 20 - .../timestamp/TimestampMappingTests.java | 452 +----------------- .../index/mapper/ttl/TTLMappingTests.java | 30 -- .../mapper/update/UpdateMappingTests.java | 21 - .../routing/SimpleRoutingIT.java | 101 ---- .../search/query/SearchQueryIT.java | 85 +--- .../elasticsearch/messy/tests/BulkTests.java | 85 ---- .../index/mapper/size/SizeFieldMapper.java | 5 +- 47 files changed, 76 insertions(+), 2748 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 21c643e7ae0..9899a5493aa 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.index; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -43,11 +42,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import java.io.IOException; @@ -605,41 +602,7 @@ public class IndexRequest extends ReplicationRequest implements Do mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER, getVersion(metaData, concreteIndex)); } - // extract values if needed if (mappingMd != null) { - MappingMetaData.ParseContext parseContext = mappingMd.createParseContext(id, routing, timestamp); - - if (parseContext.shouldParse()) { - XContentParser parser = null; - try { - parser = XContentHelper.createParser(source); - mappingMd.parse(parser, parseContext); - if (parseContext.shouldParseId()) { - id = parseContext.id(); - } - if (parseContext.shouldParseRouting()) { - if (routing != null && !routing.equals(parseContext.routing())) { - throw new MapperParsingException("The provided routing value [" + routing + "] doesn't match the routing key stored in the document: [" + parseContext.routing() + "]"); - } - routing = parseContext.routing(); - } - if (parseContext.shouldParseTimestamp()) { - timestamp = parseContext.timestamp(); - if (timestamp != null) { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter(), getVersion(metaData, concreteIndex)); - } - } - } catch (MapperParsingException e) { - throw e; - } catch (Exception e) { - throw new ElasticsearchParseException("failed to parse doc to extract routing/timestamp/id", e); - } finally { - if (parser != null) { - parser.close(); - } - } - } - // might as well check for routing here if (mappingMd.routing().required() && routing == null) { throw new RoutingMissingException(concreteIndex, type, id); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 4e3c19430e9..a26e95c40e0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -50,92 +50,20 @@ public class MappingMetaData extends AbstractDiffable { public static final MappingMetaData PROTO = new MappingMetaData(); - public static class Id { - - public static final Id EMPTY = new Id(null); - - private final String path; - - private final String[] pathElements; - - public Id(String path) { - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } - } - - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Id id = (Id) o; - - if (path != null ? !path.equals(id.path) : id.path != null) return false; - if (!Arrays.equals(pathElements, id.pathElements)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = path != null ? path.hashCode() : 0; - result = 31 * result + (pathElements != null ? 
Arrays.hashCode(pathElements) : 0); - return result; - } - } - public static class Routing { - public static final Routing EMPTY = new Routing(false, null); + public static final Routing EMPTY = new Routing(false); private final boolean required; - private final String path; - - private final String[] pathElements; - - public Routing(boolean required, String path) { + public Routing(boolean required) { this.required = required; - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } } public boolean required() { return required; } - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -143,19 +71,12 @@ public class MappingMetaData extends AbstractDiffable { Routing routing = (Routing) o; - if (required != routing.required) return false; - if (path != null ? !path.equals(routing.path) : routing.path != null) return false; - if (!Arrays.equals(pathElements, routing.pathElements)) return false; - - return true; + return required == routing.required; } @Override public int hashCode() { - int result = (required ? 1 : 0); - result = 31 * result + (path != null ? path.hashCode() : 0); - result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0); - return result; + return getClass().hashCode() + (required ? 1 : 0); } } @@ -182,31 +103,21 @@ public class MappingMetaData extends AbstractDiffable { } - public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, + public static final Timestamp EMPTY = new Timestamp(false, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null); private final boolean enabled; - private final String path; - private final String format; - private final String[] pathElements; - private final FormatDateTimeFormatter dateTimeFormatter; private final String defaultTimestamp; private final Boolean ignoreMissing; - public Timestamp(boolean enabled, String path, String format, String defaultTimestamp, Boolean ignoreMissing) { + public Timestamp(boolean enabled, String format, String defaultTimestamp, Boolean ignoreMissing) { this.enabled = enabled; - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } this.format = format; this.dateTimeFormatter = Joda.forPattern(format); this.defaultTimestamp = defaultTimestamp; @@ -217,18 +128,6 @@ public class MappingMetaData extends AbstractDiffable { return enabled; } - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - public String format() { return this.format; } @@ -258,10 +157,8 @@ public class MappingMetaData extends AbstractDiffable { if (enabled != timestamp.enabled) return false; if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false; - if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false; if (defaultTimestamp != null ? !defaultTimestamp.equals(timestamp.defaultTimestamp) : timestamp.defaultTimestamp != null) return false; if (ignoreMissing != null ? 
!ignoreMissing.equals(timestamp.ignoreMissing) : timestamp.ignoreMissing != null) return false; - if (!Arrays.equals(pathElements, timestamp.pathElements)) return false; return true; } @@ -269,9 +166,7 @@ public class MappingMetaData extends AbstractDiffable { @Override public int hashCode() { int result = (enabled ? 1 : 0); - result = 31 * result + (path != null ? path.hashCode() : 0); result = 31 * result + (format != null ? format.hashCode() : 0); - result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0); result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0); result = 31 * result + (defaultTimestamp != null ? defaultTimestamp.hashCode() : 0); result = 31 * result + (ignoreMissing != null ? ignoreMissing.hashCode() : 0); @@ -283,7 +178,6 @@ public class MappingMetaData extends AbstractDiffable { private final CompressedXContent source; - private Id id; private Routing routing; private Timestamp timestamp; private boolean hasParentField; @@ -291,9 +185,8 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); - this.id = new Id(docMapper.idFieldMapper().path()); - this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path()); - this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), + this.routing = new Routing(docMapper.routingFieldMapper().required()); + this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); @@ -337,40 +230,22 @@ public class MappingMetaData extends AbstractDiffable { } private void initMappers(Map withoutType) { - if (withoutType.containsKey("_id")) { - String path = null; - Map routingNode = (Map) withoutType.get("_id"); - for (Map.Entry entry : routingNode.entrySet()) { - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("path")) { - path = fieldNode.toString(); - } - } - this.id = new Id(path); - } else { - this.id = Id.EMPTY; - } if (withoutType.containsKey("_routing")) { boolean required = false; - String path = null; Map routingNode = (Map) withoutType.get("_routing"); for (Map.Entry entry : routingNode.entrySet()) { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { required = nodeBooleanValue(fieldNode); - } else if (fieldName.equals("path")) { - path = fieldNode.toString(); } } - this.routing = new Routing(required, path); + this.routing = new Routing(required); } else { this.routing = Routing.EMPTY; } if (withoutType.containsKey("_timestamp")) { boolean enabled = false; - String path = null; String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT; String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; Boolean ignoreMissing = null; @@ -380,8 +255,6 @@ public class MappingMetaData extends AbstractDiffable { Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { enabled = nodeBooleanValue(fieldNode); - } else if (fieldName.equals("path")) { - path = fieldNode.toString(); } else if (fieldName.equals("format")) { format = 
fieldNode.toString(); } else if (fieldName.equals("default") && fieldNode != null) { @@ -390,7 +263,7 @@ public class MappingMetaData extends AbstractDiffable { ignoreMissing = nodeBooleanValue(fieldNode); } } - this.timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing); + this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); } else { this.timestamp = Timestamp.EMPTY; } @@ -401,19 +274,15 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { + public MappingMetaData(String type, CompressedXContent source, Routing routing, Timestamp timestamp, boolean hasParentField) { this.type = type; this.source = source; - this.id = id; this.routing = routing; this.timestamp = timestamp; this.hasParentField = hasParentField; } void updateDefaultMapping(MappingMetaData defaultMapping) { - if (id == Id.EMPTY) { - id = defaultMapping.id(); - } if (routing == Routing.EMPTY) { routing = defaultMapping.routing(); } @@ -453,10 +322,6 @@ public class MappingMetaData extends AbstractDiffable { return sourceAsMap(); } - public Id id() { - return this.id; - } - public Routing routing() { return this.routing; } @@ -465,114 +330,14 @@ public class MappingMetaData extends AbstractDiffable { return this.timestamp; } - public ParseContext createParseContext(@Nullable String id, @Nullable String routing, @Nullable String timestamp) { - // We parse the routing even if there is already a routing key in the request in order to make sure that - // they are the same - return new ParseContext( - id == null && id().hasPath(), - routing().hasPath(), - timestamp == null && timestamp().hasPath() - ); - } - - public void parse(XContentParser parser, ParseContext parseContext) throws IOException { - innerParse(parser, parseContext); - } - - private void innerParse(XContentParser parser, ParseContext context) throws IOException { - if (!context.parsingStillNeeded()) { - return; - } - - XContentParser.Token token = parser.currentToken(); - if (token == null) { - token = parser.nextToken(); - } - if (token == XContentParser.Token.START_OBJECT) { - token = parser.nextToken(); - } - String idPart = context.idParsingStillNeeded() ? id().pathElements()[context.locationId] : null; - String routingPart = context.routingParsingStillNeeded() ? routing().pathElements()[context.locationRouting] : null; - String timestampPart = context.timestampParsingStillNeeded() ? timestamp().pathElements()[context.locationTimestamp] : null; - - for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { - // Must point to field name - String fieldName = parser.currentName(); - // And then the value... 
- token = parser.nextToken(); - boolean incLocationId = false; - boolean incLocationRouting = false; - boolean incLocationTimestamp = false; - if (context.idParsingStillNeeded() && fieldName.equals(idPart)) { - if (context.locationId + 1 == id.pathElements().length) { - if (!token.isValue()) { - throw new MapperParsingException("id field must be a value but was either an object or an array"); - } - context.id = parser.textOrNull(); - context.idResolved = true; - } else { - incLocationId = true; - } - } - if (context.routingParsingStillNeeded() && fieldName.equals(routingPart)) { - if (context.locationRouting + 1 == routing.pathElements().length) { - context.routing = parser.textOrNull(); - context.routingResolved = true; - } else { - incLocationRouting = true; - } - } - if (context.timestampParsingStillNeeded() && fieldName.equals(timestampPart)) { - if (context.locationTimestamp + 1 == timestamp.pathElements().length) { - context.timestamp = parser.textOrNull(); - context.timestampResolved = true; - } else { - incLocationTimestamp = true; - } - } - - if (incLocationId || incLocationRouting || incLocationTimestamp) { - if (token == XContentParser.Token.START_OBJECT) { - context.locationId += incLocationId ? 1 : 0; - context.locationRouting += incLocationRouting ? 1 : 0; - context.locationTimestamp += incLocationTimestamp ? 1 : 0; - innerParse(parser, context); - context.locationId -= incLocationId ? 1 : 0; - context.locationRouting -= incLocationRouting ? 1 : 0; - context.locationTimestamp -= incLocationTimestamp ? 1 : 0; - } - } else { - parser.skipChildren(); - } - - if (!context.parsingStillNeeded()) { - return; - } - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(type()); source().writeTo(out); - // id - if (id().hasPath()) { - out.writeBoolean(true); - out.writeString(id().path()); - } else { - out.writeBoolean(false); - } // routing out.writeBoolean(routing().required()); - if (routing().hasPath()) { - out.writeBoolean(true); - out.writeString(routing().path()); - } else { - out.writeBoolean(false); - } // timestamp out.writeBoolean(timestamp().enabled()); - out.writeOptionalString(timestamp().path()); out.writeString(timestamp().format()); out.writeOptionalString(timestamp().defaultTimestamp()); out.writeOptionalBoolean(timestamp().ignoreMissing()); @@ -586,7 +351,6 @@ public class MappingMetaData extends AbstractDiffable { MappingMetaData that = (MappingMetaData) o; - if (!id.equals(that.id)) return false; if (!routing.equals(that.routing)) return false; if (!source.equals(that.source)) return false; if (!timestamp.equals(that.timestamp)) return false; @@ -599,7 +363,6 @@ public class MappingMetaData extends AbstractDiffable { public int hashCode() { int result = type.hashCode(); result = 31 * result + source.hashCode(); - result = 31 * result + id.hashCode(); result = 31 * result + routing.hashCode(); result = 31 * result + timestamp.hashCode(); return result; @@ -608,142 +371,20 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData readFrom(StreamInput in) throws IOException { String type = in.readString(); CompressedXContent source = CompressedXContent.readCompressedString(in); - // id - Id id = new Id(in.readBoolean() ? in.readString() : null); // routing - Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? 
in.readString() : null); + Routing routing = new Routing(in.readBoolean()); // timestamp boolean enabled = in.readBoolean(); - String path = in.readOptionalString(); String format = in.readString(); String defaultTimestamp = in.readOptionalString(); Boolean ignoreMissing = null; ignoreMissing = in.readOptionalBoolean(); - final Timestamp timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing); + final Timestamp timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); final boolean hasParentField = in.readBoolean(); - return new MappingMetaData(type, source, id, routing, timestamp, hasParentField); + return new MappingMetaData(type, source, routing, timestamp, hasParentField); } - public static class ParseContext { - final boolean shouldParseId; - final boolean shouldParseRouting; - final boolean shouldParseTimestamp; - - int locationId = 0; - int locationRouting = 0; - int locationTimestamp = 0; - boolean idResolved; - boolean routingResolved; - boolean timestampResolved; - String id; - String routing; - String timestamp; - - public ParseContext(boolean shouldParseId, boolean shouldParseRouting, boolean shouldParseTimestamp) { - this.shouldParseId = shouldParseId; - this.shouldParseRouting = shouldParseRouting; - this.shouldParseTimestamp = shouldParseTimestamp; - } - - /** - * The id value parsed, null if does not require parsing, or not resolved. - */ - public String id() { - return id; - } - - /** - * Does id parsing really needed at all? - */ - public boolean shouldParseId() { - return shouldParseId; - } - - /** - * Has id been resolved during the parsing phase. - */ - public boolean idResolved() { - return idResolved; - } - - /** - * Is id parsing still needed? - */ - public boolean idParsingStillNeeded() { - return shouldParseId && !idResolved; - } - - /** - * The routing value parsed, null if does not require parsing, or not resolved. - */ - public String routing() { - return routing; - } - - /** - * Does routing parsing really needed at all? - */ - public boolean shouldParseRouting() { - return shouldParseRouting; - } - - /** - * Has routing been resolved during the parsing phase. - */ - public boolean routingResolved() { - return routingResolved; - } - - /** - * Is routing parsing still needed? - */ - public boolean routingParsingStillNeeded() { - return shouldParseRouting && !routingResolved; - } - - /** - * The timestamp value parsed, null if does not require parsing, or not resolved. - */ - public String timestamp() { - return timestamp; - } - - /** - * Does timestamp parsing really needed at all? - */ - public boolean shouldParseTimestamp() { - return shouldParseTimestamp; - } - - /** - * Has timestamp been resolved during the parsing phase. - */ - public boolean timestampResolved() { - return timestampResolved; - } - - /** - * Is timestamp parsing still needed? - */ - public boolean timestampParsingStillNeeded() { - return shouldParseTimestamp && !timestampResolved; - } - - /** - * Do we really need parsing? - */ - public boolean shouldParse() { - return shouldParseId || shouldParseRouting || shouldParseTimestamp; - } - - /** - * Is parsing still needed? 
- */ - public boolean parsingStillNeeded() { - return idParsingStillNeeded() || routingParsingStillNeeded() || timestampParsingStillNeeded(); - } - } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 24da3c267c0..a042bbcb9f3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -78,7 +78,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable * and 100 afterwards so we override the positionIncrementGap if it * doesn't match here. */ - int overridePositionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(indexSettings.getIndexVersionCreated()); + int overridePositionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP; if (analyzerFactory instanceof CustomAnalyzerProvider) { ((CustomAnalyzerProvider) analyzerFactory).build(this); /* diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index 047e278f953..3c4768279f2 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -74,7 +74,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider fieldNodeMap, Version indexVersionCreated, String message) { if (!fieldNodeMap.isEmpty()) { - if (indexVersionCreated.onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(message + getRemainingFields(fieldNodeMap)); - } else { - logger.debug(message + "{}", getRemainingFields(fieldNodeMap)); - } + throw new MapperParsingException(message + getRemainingFields(fieldNodeMap)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index dd89ad37b44..c1362287d67 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -23,15 +23,12 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -123,8 +120,7 @@ class DocumentParser implements Closeable { // try to parse the next token, this should be null if the object is ended properly // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch) - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) - && source.parser() == null && parser != null) { + if 
(source.parser() == null && parser != null) {
                 // only check for end of tokens if we created the parser here
                 token = parser.nextToken();
                 if (token != null) {
@@ -191,8 +187,7 @@
         XContentParser parser = context.parser();
         String currentFieldName = parser.currentName();
-        if (atRoot && MapperService.isMetadataField(currentFieldName) &&
-                Version.indexCreated(context.indexSettings()).onOrAfter(Version.V_2_0_0_beta1)) {
+        if (atRoot && MapperService.isMetadataField(currentFieldName)) {
             throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside a document. Use the index API request parameters.");
         }
         XContentParser.Token token = parser.currentToken();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
index 9bf58f6107f..2071caaaf69 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.lucene.Lucene;
@@ -231,10 +230,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
             fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
         }
-        boolean defaultDocValues = false; // pre 2.0
-        if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
-            defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
-        }
+        boolean defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
         // backcompat for "fielddata: format: docvalues" for now...
         boolean fieldDataDocValues = fieldType.fieldDataType() != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings()));
@@ -253,12 +249,10 @@
     protected final MappedFieldType defaultFieldType;
     protected MultiFields multiFields;
     protected CopyTo copyTo;
-    protected final boolean indexCreatedBefore2x;
 
     protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName);
         assert indexSettings != null;
-        this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
         fieldType.freeze();
         this.fieldType = fieldType;
         defaultFieldType.freeze();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 59724913bad..187c056a14f 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -229,7 +229,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         if (mapper.type().length() == 0) {
             throw new InvalidTypeNameException("mapping type name is empty");
         }
-        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) {
+        if (mapper.type().length() > 255) {
             throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]");
         }
         if (mapper.type().charAt(0) == '_') {
@@ -245,11 +245,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
         }
         if (typeNameStartsWithIllegalDot(mapper)) {
-            if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
-                throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'");
-            } else {
-                logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type());
-            }
+            throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'");
         }
         // 1. compute the merged DocumentMapper
@@ -362,16 +358,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             }
         }
 
-        if (indexSettings.getIndexVersionCreated().before(Version.V_3_0_0)) {
-            // Before 3.0 some metadata mappers are also registered under the root object mapper
-            // So we avoid false positives by deduplicating mappers
-            // given that we check exact equality, this would still catch the case that a mapper
-            // is defined under the root object
-            Collection<FieldMapper> uniqueFieldMappers = Collections.newSetFromMap(new IdentityHashMap<>());
-            uniqueFieldMappers.addAll(fieldMappers);
-            fieldMappers = uniqueFieldMappers;
-        }
-
         final Set<String> fieldNames = new HashSet<>();
         for (FieldMapper fieldMapper : fieldMappers) {
             final String name = fieldMapper.name();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
index 25ea4b7d398..6f2fea6a3d4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
@@ -27,12 +27,9 @@ import org.elasticsearch.index.mapper.object.RootObjectMapper;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
 
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
@@ -43,10 +40,6 @@ import static java.util.Collections.unmodifiableMap;
  */
 public final class Mapping implements ToXContent {
 
-    // Set of fields that were included into the root object mapper before 2.0
-    public static final Set<String> LEGACY_INCLUDE_IN_OBJECT = Collections.unmodifiableSet(new HashSet<>(
-            Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl")));
-
     final Version indexCreated;
     final RootObjectMapper root;
     final MetadataFieldMapper[] metadataMappers;
@@ -58,9 +51,6 @@ public final class Mapping implements ToXContent {
         this.metadataMappers = metadataMappers;
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
         for (MetadataFieldMapper metadataMapper : metadataMappers) {
-            if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
-                rootObjectMapper = rootObjectMapper.copyAndPutMapper(metadataMapper);
-            }
             metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
         }
         this.root = rootObjectMapper;
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
index 3c80106c4f0..f71267fa75b 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
@@ -26,9 +26,7 @@ import org.apache.lucene.store.ByteArrayDataOutput;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Base64;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
@@ -41,7 +39,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
 
 import java.io.IOException;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -54,9 +51,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
 public class BinaryFieldMapper extends FieldMapper {
 
     public static final String CONTENT_TYPE = "binary";
-    private static final ParseField COMPRESS = new ParseField("compress").withAllDeprecated("no replacement, implemented at the codec level");
-    private static final ParseField COMPRESS_THRESHOLD = new ParseField("compress_threshold").withAllDeprecated("no replacement");
-
     public static class Defaults {
         public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();
@@ -87,14 +81,6 @@ public class BinaryFieldMapper extends FieldMapper {
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             BinaryFieldMapper.Builder builder = binaryField(name);
             parseField(builder, name, node, parserContext);
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = entry.getKey();
-                if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1) &&
-                        (parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
-                    iterator.remove();
-                }
-            }
             return builder;
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
index 1bd018f3f6b..a9174ea161a 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
@@ -73,7 +73,6 @@ public class DateFieldMapper extends NumberFieldMapper {
     public static class Defaults extends NumberFieldMapper.Defaults {
         public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT);
-        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("date_optional_time", Locale.ROOT);
         public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
 
         public static final DateFieldType FIELD_TYPE = new DateFieldType();
@@ -128,12 +127,6 @@ public class DateFieldMapper extends NumberFieldMapper {
     @Override
     protected void setupFieldType(BuilderContext context) {
-        if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0_beta1) &&
-                !fieldType().dateTimeFormatter().format().contains("epoch_")) {
-            String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ?
"epoch_second" : "epoch_millis"; - fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + fieldType().dateTimeFormatter().format())); - } - FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; if (!locale.equals(dateTimeFormatter.locale())) { fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); @@ -186,11 +179,7 @@ public class DateFieldMapper extends NumberFieldMapper { } } if (!configuredFormat) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); - } else { - builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER_BEFORE_2_0); - } + builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); } return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index f8f9cb25ba1..22394704754 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -69,19 +69,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc * values. */ public static final int POSITION_INCREMENT_GAP = 100; - public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0; public static final int IGNORE_ABOVE = -1; - - /** - * The default position_increment_gap for a particular version of Elasticsearch. - */ - public static int positionIncrementGap(Version version) { - if (version.before(Version.V_2_0_0_beta1)) { - return POSITION_INCREMENT_GAP_PRE_2_0; - } - return POSITION_INCREMENT_GAP; - } } public static class Builder extends FieldMapper.Builder { @@ -175,8 +164,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } builder.searchQuotedAnalyzer(analyzer); iterator.remove(); - } else if (propName.equals("position_increment_gap") || - parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) { + } else if (propName.equals("position_increment_gap")) { int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); if (newPositionIncrementGap < 0) { throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed."); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 3ebba17d65c..b036a43b786 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -193,9 +193,7 @@ public class TypeParsers { } else if (propName.equals("store_term_vector_payloads")) { builder.storeTermVectorPayloads(nodeBooleanValue(propNode)); iterator.remove(); - } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0 - propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - + } else if (propName.equals("analyzer")) { NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); if (analyzer == null) { throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); @@ -272,26 +270,12 @@ public class TypeParsers { } DocumentMapperParser.checkNoRemainingFields(propName, properties, 
parserContext.indexVersionCreated());
                 iterator.remove();
-            } else if (propName.equals("omit_term_freq_and_positions")) {
-                final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-                if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
-                    throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead");
-                }
-                // deprecated option for BW compat
-                builder.indexOptions(op);
-                iterator.remove();
             } else if (propName.equals("index_options")) {
                 builder.indexOptions(nodeIndexOptionValue(propNode));
                 iterator.remove();
             } else if (propName.equals("include_in_all")) {
                 builder.includeInAll(nodeBooleanValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
-                // ignore for old indexes
-                iterator.remove();
-            } else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
-                // ignore for old indexes
-                iterator.remove();
             } else if (propName.equals("similarity")) {
                 builder.similarity(parserContext.getSimilarity(propNode.toString()));
                 iterator.remove();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
index 589abd99191..8c954c06a5e 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
@@ -127,30 +127,11 @@
     }
 
     public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
-        final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
         for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
             String propName = Strings.toUnderscoreCase(entry.getKey());
             Object propNode = entry.getValue();
-            if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
-                builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
-                builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
-                builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (propName.equals(Names.COERCE)) {
-                builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
-                builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
-                builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
-                iterator.remove();
-            } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
+            if (propName.equals(Names.COERCE)) {
                 builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
                 iterator.remove();
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
index 9cb38e38a62..2ea59393ca0 100644
---
a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -30,7 +30,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoUtils; @@ -96,8 +95,8 @@ public class GeoShapeFieldMapper extends FieldMapper { public static final boolean POINTS_ONLY = false; public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m"); public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m"); - public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; public static final Orientation ORIENTATION = Orientation.RIGHT; + public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; public static final Explicit COERCE = new Explicit<>(false, false); public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); @@ -147,12 +146,7 @@ public class GeoShapeFieldMapper extends FieldMapper { public GeoShapeFieldMapper build(BuilderContext context) { GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; - if (geoShapeFieldType.tree.equals(Names.TREE_QUADTREE) && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) { - geoShapeFieldType.setTree("legacyquadtree"); - } - - if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1) || - (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) { + if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) { geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT); } setupFieldType(context); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index d370cbe9645..d9a345caf28 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -154,9 +154,6 @@ public class AllFieldMapper extends MetadataFieldMapper { if (fieldName.equals("enabled")) { builder.enabled(nodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
                 iterator.remove();
-            } else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                // Old 1.x setting which is now ignored
-                iterator.remove();
             }
         }
         return builder;
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
index cd4fdc533e4..17d1c2b9f08 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -42,7 +41,6 @@ import java.util.Map;
 import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
 
 /**
  * A mapper that indexes the field names of a document under _field_names. This mapper is typically useful in order
@@ -107,14 +105,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
         public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().before(Version.V_1_3_0)) {
-                throw new IllegalArgumentException("type="+CONTENT_TYPE+" is not supported on indices created before version 1.3.0. Is your cluster running multiple datanode versions?");
-            }
-
             Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
-            if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                parseField(builder, builder.name, node, parserContext);
-            }
 
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
@@ -203,21 +194,12 @@
         }
     }
 
-    private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
-
     private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
         this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), indexSettings);
     }
 
     private FieldNamesFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
         super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
-        this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
-        if (this.pre13Index) {
-            FieldNamesFieldType newFieldType = fieldType().clone();
-            newFieldType.setEnabled(false);
-            newFieldType.freeze();
-            this.fieldType = newFieldType;
-        }
     }
 
     @Override
@@ -304,9 +286,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (pre13Index) {
-            return builder;
-        }
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
 
         if (includeDefaults == false && fieldType().isEnabled() == Defaults.ENABLED) {
@@ -317,9 +296,6 @@
         if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) {
             builder.field("enabled", fieldType().isEnabled());
         }
-        if (indexCreatedBefore2x && (includeDefaults || fieldType().equals(Defaults.FIELD_TYPE) == false)) {
-            super.doXContentBody(builder, includeDefaults, params);
-        }
 
         builder.endObject();
         return builder;
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
index a342f5b2180..a586a7b5b94 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
@@ -31,9 +31,7 @@ import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -51,12 +49,9 @@ import org.elasticsearch.index.query.QueryShardContext;
 
 import java.io.IOException;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
-
 /**
  *
 */
@@ -80,23 +75,15 @@ public class IdFieldMapper extends MetadataFieldMapper {
             FIELD_TYPE.setName(NAME);
             FIELD_TYPE.freeze();
         }
-
-        public static final String PATH = null;
     }
 
     public static class Builder extends MetadataFieldMapper.Builder<Builder, IdFieldMapper> {
-        private String path = Defaults.PATH;
-
         public Builder(MappedFieldType existing) {
             super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
             indexName = Defaults.NAME;
         }
 
-        public Builder path(String path) {
-            this.path = path;
-            return builder;
-        }
 
         // if we are indexed we use DOCS
         @Override
         protected IndexOptions getDefaultIndexOption() {
@@ -106,28 +93,14 @@
         @Override
         public IdFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            return new IdFieldMapper(fieldType, path, context.indexSettings());
+            return new IdFieldMapper(fieldType, context.indexSettings());
         }
     }
 
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
         public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
-                throw new MapperParsingException(NAME + " is not configurable");
-            }
-            Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
-            parseField(builder, builder.name, node, parserContext);
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = Strings.toUnderscoreCase(entry.getKey());
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("path")) {
-                    builder.path(fieldNode.toString());
-                    iterator.remove();
-                }
-            }
-            return builder;
+            throw new MapperParsingException(NAME + " is not configurable");
         }
 
         @Override
@@ -229,31 +202,12 @@
         }
     }
 
-    private final String path;
-
     private IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(idFieldType(indexSettings, existing), Defaults.PATH, indexSettings);
+        this(existing != null ? existing : Defaults.FIELD_TYPE, indexSettings);
     }
 
-    private IdFieldMapper(MappedFieldType fieldType, String path, Settings indexSettings) {
+    private IdFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
         super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
-        this.path = path;
-    }
-
-    private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) {
-        if (existing != null) {
-            return existing.clone();
-        }
-        MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
-        boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
-        if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {
-            fieldType.setTokenized(false);
-        }
-        return fieldType;
-    }
-
-    public String path() {
-        return this.path;
     }
 
     @Override
@@ -299,33 +253,6 @@
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (indexCreatedBefore2x == false) {
-            return builder;
-        }
-        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
-
-        // if all are defaults, no sense to write it at all
-        if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored()
-            && fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions()
-            && path == Defaults.PATH
-            && hasCustomFieldDataSettings() == false) {
-            return builder;
-        }
-        builder.startObject(CONTENT_TYPE);
-        if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
-            builder.field("store", fieldType().stored());
-        }
-        if (includeDefaults || fieldType().indexOptions() != Defaults.FIELD_TYPE.indexOptions()) {
-            builder.field("index", indexTokenizeOptionToString(fieldType().indexOptions() !=
IndexOptions.NONE, fieldType().tokenized())); - } - if (includeDefaults || path != Defaults.PATH) { - builder.field("path", path); - } - - if (includeDefaults || hasCustomFieldDataSettings()) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } - builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 52a2e97508c..d4aa2da4ab3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -24,9 +24,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -39,12 +37,9 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; -import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * @@ -99,23 +94,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - return builder; - } - - parseField(builder, builder.name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - EnabledAttributeMapper mapper = nodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(mapper); - iterator.remove(); - } - } - return builder; + return new Builder(parserContext.mapperService().fullName(NAME)); } @Override @@ -260,19 +239,13 @@ public class IndexFieldMapper extends MetadataFieldMapper { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all defaults, no need to write it at all - if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && hasCustomFieldDataSettings() == false) { + if (includeDefaults == false && enabledState == Defaults.ENABLED_STATE) { return builder; } builder.startObject(CONTENT_TYPE); - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { builder.field("enabled", enabledState.enabled); } - if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 4325dfc8a96..abb9178b875 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -138,9 +138,6 @@ public class ParentFieldMapper extends MetadataFieldMapper { if (fieldName.equals("type")) { builder.type(fieldNode.toString()); iterator.remove(); - } else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - // ignore before 2.0, reject on and after 2.0 - iterator.remove(); } else if (fieldName.equals("fielddata")) { // Only take over `loading`, since that is the only option now that is configurable: Map fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata")); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 4fb410dbdc8..e96bfef6f35 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -40,7 +39,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * @@ -67,15 +65,12 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } public static final boolean REQUIRED = false; - public static final String PATH = null; } public static class Builder extends MetadataFieldMapper.Builder { private boolean required = Defaults.REQUIRED; - private String path = Defaults.PATH; - public Builder(MappedFieldType 
existing) { super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); } @@ -85,14 +80,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper { return builder; } - public Builder path(String path) { - this.path = path; - return builder; - } - @Override public RoutingFieldMapper build(BuilderContext context) { - return new RoutingFieldMapper(fieldType, required, path, context.indexSettings()); + return new RoutingFieldMapper(fieldType, required, context.indexSettings()); } } @@ -100,9 +90,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - parseField(builder, builder.name, node, parserContext); - } for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); @@ -110,9 +97,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper { if (fieldName.equals("required")) { builder.required(nodeBooleanValue(fieldNode)); iterator.remove(); - } else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.path(fieldNode.toString()); - iterator.remove(); } } return builder; @@ -154,16 +138,14 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } private boolean required; - private final String path; private RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings); + this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, indexSettings); } - private RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) { + private RoutingFieldMapper(MappedFieldType fieldType, boolean required, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); this.required = required; - this.path = path; } public void markAsRequired() { @@ -174,10 +156,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper { return this.required; } - public String path() { - return this.path; - } - public String value(Document document) { Field field = (Field) document.getField(fieldType().name()); return field == null ? 
null : (String)fieldType().value(field); @@ -224,25 +202,13 @@ public class RoutingFieldMapper extends MetadataFieldMapper { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all are defaults, no sense to write it at all - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; - if (!includeDefaults && indexed == indexedDefault && - fieldType().stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) { + if (!includeDefaults && required == Defaults.REQUIRED) { return builder; } builder.startObject(CONTENT_TYPE); - if (indexCreatedBefore2x && (includeDefaults || indexed != indexedDefault)) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } if (includeDefaults || required != Defaults.REQUIRED) { builder.field("required", required); } - if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) { - builder.field("path", path); - } builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 3771747bf66..9ea6018a33c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -46,7 +45,6 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; public class TimestampFieldMapper extends MetadataFieldMapper { @@ -58,10 +56,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper { public static final String NAME = "_timestamp"; // TODO: this should be removed - public static final TimestampFieldType PRE_20_FIELD_TYPE; public static final TimestampFieldType FIELD_TYPE = new TimestampFieldType(); public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("epoch_millis||dateOptionalTime"); static { FIELD_TYPE.setStored(true); @@ -73,34 +69,20 @@ public class TimestampFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE)); FIELD_TYPE.setHasDocValues(true); FIELD_TYPE.freeze(); - PRE_20_FIELD_TYPE = FIELD_TYPE.clone(); - PRE_20_FIELD_TYPE.setStored(false); - PRE_20_FIELD_TYPE.setHasDocValues(false); - PRE_20_FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER_BEFORE_2_0); - 
PRE_20_FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Defaults.PRECISION_STEP_64_BIT)); - PRE_20_FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Integer.MAX_VALUE)); - PRE_20_FIELD_TYPE.freeze(); } public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED; - public static final String PATH = null; public static final String DEFAULT_TIMESTAMP = "now"; } public static class Builder extends MetadataFieldMapper.Builder { private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; - private String path = Defaults.PATH; private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; - private boolean explicitStore = false; private Boolean ignoreMissing = null; public Builder(MappedFieldType existing, Settings settings) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, chooseFieldType(settings, null)); - if (existing != null) { - // if there is an existing type, always use that store value (only matters for < 2.0) - explicitStore = true; - } + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); } @Override @@ -113,11 +95,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { return builder; } - public Builder path(String path) { - this.path = path; - return builder; - } - public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { fieldType().setDateTimeFormatter(dateTimeFormatter); return this; @@ -135,42 +112,21 @@ public class TimestampFieldMapper extends MetadataFieldMapper { @Override public Builder store(boolean store) { - explicitStore = true; return super.store(store); } @Override public TimestampFieldMapper build(BuilderContext context) { - if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) { - fieldType.setStored(false); - } - - if (fieldType().dateTimeFormatter().equals(Defaults.DATE_TIME_FORMATTER)) { - fieldType().setDateTimeFormatter(getDateTimeFormatter(context.indexSettings())); - } - setupFieldType(context); - return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, path, defaultTimestamp, + return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, defaultTimestamp, ignoreMissing, context.indexSettings()); } } - private static FormatDateTimeFormatter getDateTimeFormatter(Settings indexSettings) { - Version indexCreated = Version.indexCreated(indexSettings); - if (indexCreated.onOrAfter(Version.V_2_0_0_beta1)) { - return Defaults.DATE_TIME_FORMATTER; - } else { - return Defaults.DATE_TIME_FORMATTER_BEFORE_2_0; - } - } - public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings()); - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - parseField(builder, builder.name, node, parserContext); - } boolean defaultSet = false; Boolean ignoreMissing = null; for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -181,23 +137,12 @@ public class TimestampFieldMapper extends MetadataFieldMapper { EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); - } else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.path(fieldNode.toString()); - iterator.remove(); } else if (fieldName.equals("format")) { builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString())); iterator.remove(); } else if (fieldName.equals("default")) { if (fieldNode == null) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_4_0_Beta1) && - parserContext.indexVersionCreated().before(Version.V_1_5_0)) { - // We are reading an index created in 1.4 with feature #7036 - // `default: null` was explicitly set. We need to change this index to - // `ignore_missing: false` - builder.ignoreMissing(false); - } else { - throw new TimestampParsingException("default timestamp can not be set to null"); - } + throw new TimestampParsingException("default timestamp can not be set to null"); } else { builder.defaultTimestamp(fieldNode.toString()); defaultSet = true; @@ -246,28 +191,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } } - private static MappedFieldType chooseFieldType(Settings settings, MappedFieldType existing) { - if (existing != null) { - return existing; - } - return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0_beta1) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE; - } - private EnabledAttributeMapper enabledState; - private final String path; private final String defaultTimestamp; private final Boolean ignoreMissing; private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null).clone(), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); + this(existing != null ? 
existing : Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, Defaults.ENABLED, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); } - private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path, + private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) { super(NAME, fieldType, defaultFieldType, indexSettings); this.enabledState = enabledState; - this.path = path; this.defaultTimestamp = defaultTimestamp; this.ignoreMissing = ignoreMissing; } @@ -281,10 +217,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { return this.enabledState.enabled; } - public String path() { - return this.path; - } - public String defaultTimestamp() { return this.defaultTimestamp; } @@ -332,35 +264,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; // if all are defaults, no sense to write it at all - if (!includeDefaults && indexed == indexedDefault && hasCustomFieldDataSettings() == false && - fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH + if (!includeDefaults && enabledState == Defaults.ENABLED && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) - && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp) - && defaultFieldType.hasDocValues() == fieldType().hasDocValues()) { + && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { return builder; } builder.startObject(CONTENT_TYPE); if (includeDefaults || enabledState != Defaults.ENABLED) { builder.field("enabled", enabledState.enabled); } - if (indexCreatedBefore2x && (includeDefaults || (indexed != indexedDefault) || (fieldType().tokenized() != Defaults.FIELD_TYPE.tokenized()))) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.PRE_20_FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } - if (indexCreatedBefore2x) { - doXContentDocValues(builder, includeDefaults); - } - if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) { - builder.field("path", path); - } // different format handling depending on index version - String defaultDateFormat = indexCreatedBefore2x ? 
Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format() : Defaults.DATE_TIME_FORMATTER.format(); + String defaultDateFormat = Defaults.DATE_TIME_FORMATTER.format(); if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) { builder.field("format", fieldType().dateTimeFormatter().format()); } @@ -370,9 +286,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { if (includeDefaults || ignoreMissing != null) { builder.field("ignore_missing", ignoreMissing); } - if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) { - builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap()); - } builder.endObject(); return builder; @@ -396,13 +309,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); } - if (this.path != null) { - if (path.equals(timestampFieldMapperMergeWith.path()) == false) { - conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path())); - } - } else if (timestampFieldMapperMergeWith.path() != null) { - conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing"); - } if (conflicts.isEmpty() == false) { throw new IllegalArgumentException("Conflicts: " + conflicts); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index 05c13d01339..72defadf6fd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -47,8 +47,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; - /** * */ @@ -92,12 +90,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(NAME + " is not configurable"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - parseField(builder, builder.name, node, parserContext); - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -199,25 +192,6 @@ public class TypeFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (indexCreatedBefore2x == false) { - return builder; - } - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean defaultIndexed = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; - if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && indexed == defaultIndexed) { - return builder; - } - 
builder.startObject(CONTENT_TYPE); - if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) { - builder.field("store", fieldType().stored()); - } - if (includeDefaults || indexed != defaultIndexed) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index 532263b1506..828651409b1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -85,7 +85,6 @@ public class UidFieldMapper extends MetadataFieldMapper { @Override public UidFieldMapper build(BuilderContext context) { setupFieldType(context); - fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0_beta1)); return new UidFieldMapper(fieldType, defaultFieldType, context.indexSettings()); } } @@ -93,12 +92,7 @@ public class UidFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(NAME + " is not configurable"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - parseField(builder, builder.name, node, parserContext); - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -203,23 +197,6 @@ public class UidFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (indexCreatedBefore2x == false) { - return builder; - } - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if defaults, don't output - if (!includeDefaults && hasCustomFieldDataSettings() == false) { - return builder; - } - - builder.startObject(CONTENT_TYPE); - - if (includeDefaults || hasCustomFieldDataSettings()) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } - - builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index e9577b9d509..027b2ef05ff 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; -import org.elasticsearch.Version; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; @@ -35,7 +33,6 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.Iterator; import java.util.List; import java.util.Map; @@ -73,16 +70,7 @@ public class 
 
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
         public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            Builder builder = new Builder();
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = Strings.toUnderscoreCase(entry.getKey());
-                if (fieldName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                    // ignore in 1.x, reject in 2.x
-                    iterator.remove();
-                }
-            }
-            return builder;
+            throw new MapperParsingException(NAME + " is not configurable");
         }
 
         @Override
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
index 8183a2179a2..64a60305b10 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
@@ -215,14 +215,6 @@ public class RootObjectMapper extends ObjectMapper {
         this.numericDetection = numericDetection;
     }
 
-    /** Return a copy of this mapper that has the given {@code mapper} as a
-     * sub mapper. */
-    public RootObjectMapper copyAndPutMapper(Mapper mapper) {
-        RootObjectMapper clone = (RootObjectMapper) clone();
-        clone.putMapper(mapper);
-        return clone;
-    }
-
     @Override
     public ObjectMapper mappingUpdate(Mapper mapper) {
         RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper);
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java
deleted file mode 100644
index e4462007c9a..00000000000
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.elasticsearch.cluster.metadata; - -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.test.ESTestCase; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class MappingMetaDataParserTests extends ESTestCase { - public void testParseIdAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.idResolved(), equalTo(true)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testFailIfIdIsNoValue() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startArray("id").value("id").endArray().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1"); - try { - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - fail(); - } catch (MapperParsingException ex) { - // bogus its an array - } - - bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("id").field("x", "id").endObject().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - parseContext = md.createParseContext(null, "routing_value", "1"); - try { - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - fail(); - } catch (MapperParsingException ex) { - // bogus its an object - } - } - - public void testParseRoutingAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - 
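// NOTE (a hedged reading of this deleted fixture, going only by its assertions):
// createParseContext(id, routing, timestamp) is seeded with the values already
// known from the request, and the context only reports values it actually had
// to extract from the document source. The id and timestamp are pre-supplied
// below, so the assertions expect them to stay unresolved while the routing is
// read out of the parsed document.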
MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "1"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseTimestampAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext("id", "routing_value1", null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), equalTo("1")); - assertThat(parseContext.timestampResolved(), equalTo(true)); - } - - public void testParseTimestampEquals() throws Exception { - MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - assertThat(md1, equalTo(md2)); - } - - public void testParseIdAndRoutingAndTimestamp() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", 
TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "2"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.idResolved(), equalTo(true)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseRoutingWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "2"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, 
"obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value1", null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), equalTo("1")); - assertThat(parseContext.timestampResolved(), equalTo(true)); - } - - public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject() - .startObject("obj2").field("field1", "value1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.obj0.id"), - new MappingMetaData.Routing(true, "obj1.obj2.routing"), - new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1") - .startObject("obj0") - .field("id", "id") - .endObject() - .startObject("obj2") - .field("routing", "routing_value") - .endObject() - .startObject("obj3") - .field("timestamp", "1") - .endObject() - .endObject() - .startObject("obj2").field("field1", "value1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - 
new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").endObject() - .startObject("obj1").field("routing", "routing_value").endObject() - .startObject("obj1").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdRoutingTimestampWithRepeatedField() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("field1"), - new MappingMetaData.Routing(true, "field1.field1"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .field("field1", "bar") - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("foo")); - assertThat(parseContext.routing(), nullValue()); - assertThat(parseContext.timestamp(), equalTo("foo")); - } - - public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "field1.field1.field2"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .startObject("field1").field("field2", "bar").endObject() - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.routing(), nullValue()); - assertThat(parseContext.timestamp(), equalTo("foo")); - } - - public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id(null), - new MappingMetaData.Routing(true, "field1.field2"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .startObject("field1").field("field2", "bar").endObject() - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - 
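// The document built above deliberately repeats "field1" as both a scalar
// ("foo") and an object ({"field2": "bar"}): the timestamp path "field1" should
// pick up the scalar while the routing path "field1.field2" should descend into
// the object, which is exactly what the assertions that follow check
// ("foo" and "bar").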
- MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.routing(), equalTo("bar")); - assertThat(parseContext.timestamp(), equalTo("foo")); - } -} diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 6d04836a810..cce4c0d22c4 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -242,25 +242,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1")); } - public void testRealtimeGetWithCompressBackcompat() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)) - .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("_source").field("compress", true).endObject().endObject().endObject())); - ensureGreen(); - - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < 10000; i++) { - sb.append((char) i); - } - String fieldValue = sb.toString(); - client().prepareIndex("test", "type", "1").setSource("field", fieldValue).get(); - - // realtime get - GetResponse getResponse = client().prepareGet("test", "type", "1").get(); - assertThat(getResponse.isExists(), equalTo(true)); - assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue)); - } - public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") @@ -948,63 +929,6 @@ public class GetActionIT extends ESIntegTestCase { assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); } - public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabledBackcompat() throws IOException { - indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(false, false); - String[] fieldsList = {}; - // before refresh - document is only in translog - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - refresh(); - //after refresh - document is in translog and also indexed - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - flush(); - //after flush - document is in not anymore translog - only indexed - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - } - - public void testUngeneratedFieldsPartOfSourceEitherStoredOrSourceEnabledBackcompat() throws IOException { - boolean stored = randomBoolean(); - boolean sourceEnabled = true; - if (stored) { - sourceEnabled = randomBoolean(); - } - indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(stored, sourceEnabled); - String[] fieldsList = {}; - // before refresh - document is only in translog - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - refresh(); - //after refresh - document is in translog and also indexed - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - flush(); - //after flush - document is in not anymore translog - only indexed - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - } - - void indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(boolean stored, boolean sourceEnabled) { - String 
storedString = stored ? "yes" : "no"; - String createIndexSource = "{\n" + - " \"settings\": {\n" + - " \"index.translog.flush_threshold_size\": \"1pb\",\n" + - " \"refresh_interval\": \"-1\",\n" + - " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + - " },\n" + - " \"mappings\": {\n" + - " \"doc\": {\n" + - " \"_source\": {\n" + - " \"enabled\": " + sourceEnabled + "\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); - ensureGreen(); - String doc = "{\n" + - " \"my_boost\": 5.0,\n" + - " \"_ttl\": \"1h\"\n" + - "}\n"; - - client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get(); - } - public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 3c7a57f3849..035da8163ee 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -33,9 +31,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.test.VersionUtils.getFirstVersion; -import static org.elasticsearch.test.VersionUtils.getPreviousVersion; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.hasToString; @@ -58,23 +53,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase { .actionGet(); } - public void testThatLongTypeNameIsNotRejectedOnPreElasticsearchVersionTwo() { - String index = "text-index"; - String field = "field"; - String type = new String(new char[256]).replace("\0", "a"); - - CreateIndexResponse response = - client() - .admin() - .indices() - .prepareCreate(index) - .setSettings(settings(randomVersionBetween(random(), getFirstVersion(), getPreviousVersion(Version.V_2_0_0_beta1)))) - .addMapping(type, field, "type=string") - .execute() - .actionGet(); - assertNotNull(response); - } - public void testTypeNameTooLong() { String index = "text-index"; String field = "field"; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 341ba25fd9f..7b48f117444 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -456,19 +456,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } } - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = 
createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_all", "foo").endObject().bytes()); - - assertNull(doc.rootDoc().get("_all")); - AllField field = (AllField) doc.rootDoc().getField("_all"); - // the backcompat behavior is actually ignoring directly specifying _all - assertFalse(field.getAllEntries().fields().iterator().hasNext()); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java deleted file mode 100644 index 8ddfc3a2ae7..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper.date; - -import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.junit.Before; - -import java.util.Arrays; -import java.util.List; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; - -/** - * Test class to check for all the conditions defined in - * https://github.com/elastic/elasticsearch/issues/10971 - */ -public class DateBackwardsCompatibilityTests extends ESSingleNodeTestCase { - - private String index = "testindex"; - private String type = "testtype"; - private Version randomVersionBelow2x; - - @Before - public void setup() throws Exception { - randomVersionBelow2x = randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - } - - public void testThatPre2xIndicesNumbersAreTreatedAsEpochs() throws Exception { - index = createPre2xIndexAndMapping(); - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - XContentBuilder document = jsonBuilder().startObject().field("date_field", dateInMillis).endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 1); - } - - public void testThatPre2xFailedStringParsingLeadsToEpochParsing() throws Exception { - index = createPre2xIndexAndMapping(); - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - String date = String.valueOf(dateInMillis); - XContentBuilder document = jsonBuilder().startObject().field("date_field", date).endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 1); - } - - public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception { - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - List dateFormats = Arrays.asList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute"); - - for (String format : dateFormats) { - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format", format).endObject() - .endObject().endObject(); - - index = createIndex(randomVersionBelow2x, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - 
.field("date_field", String.valueOf(dateInMillis)) - .endObject(); - index(document); - - // indexing as regular timestamp should work as well - document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", dateInMillis) - .endObject(); - index(document); - - client().admin().indices().prepareDelete(index).get(); - } - } - - public void testThatPre2xIndicesNumbersAreTreatedAsTimestamps() throws Exception { - // looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format - long date = 2015062301000l; - - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() - .endObject().endObject(); - index = createIndex(randomVersionBelow2x, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", randomBoolean() ? String.valueOf(date) : date) - .endObject(); - index(document); - - // no results in expected time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertNoSearchHits(response); - - // result in unix timestamp range - QueryBuilder timestampQuery = QueryBuilders.rangeQuery("date_field").from(2015062300000L).to(2015062302000L); - assertHitCount(client().prepareSearch(index).setQuery(timestampQuery).get(), 1); - - // result should also work with regular specified dates - QueryBuilder regularTimeQuery = QueryBuilders.rangeQuery("date_field").from("2033-11-08").to("2033-11-09").format("dateOptionalTime"); - assertHitCount(client().prepareSearch(index).setQuery(regularTimeQuery).get(), 1); - } - - public void testThatPost2xIndicesNumbersAreTreatedAsStrings() throws Exception { - // looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format - long date = 2015062301000l; - - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() - .endObject().endObject(); - index = createIndex(Version.CURRENT, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", String.valueOf(date)) - .endObject(); - index(document); - - document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", date) - .endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 2); - } - - public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception { - try { - XContentBuilder mapping = jsonBuilder().startObject() - .startArray("dynamic_date_formats").value("dateOptionalTime").value("epoch_seconds").endArray() - .endObject(); - createIndex(Version.CURRENT, mapping); - fail("Expected a MapperParsingException, but did not happen"); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Failed to parse mapping [" + type + "]")); - assertThat(e.getMessage(), containsString("Epoch [epoch_seconds] is not supported as dynamic date format")); - } - } - - private String createPre2xIndexAndMapping() throws Exception { - return 
createIndexAndMapping(randomVersionBelow2x); - } - - private String createIndexAndMapping(Version version) throws Exception { - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format", "dateOptionalTime").endObject() - .endObject().endObject(); - - return createIndex(version, mapping); - } - - private String createIndex(Version version, XContentBuilder mapping) { - Settings settings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - createIndex(index, settings, type, mapping); - - ensureGreen(index); - return index; - } - - private void index(XContentBuilder document) { - IndexResponse indexResponse = client().prepareIndex(index, type).setSource(document).setRefresh(true).get(); - assertThat(indexResponse.isCreated(), is(true)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index 091c1ca2801..f9531c3aff3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -433,51 +433,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { throw new AssertionError("missing"); } - public void testNumericResolutionBackwardsCompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", mapping, Version.V_0_90_0); - - // provided as an int - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", 42) - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(42000L)); - - // provided as a string - doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "43") - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(43000L)); - - // but formatted dates still parse as milliseconds - doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1970-01-01T00:00:44.000Z") - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L)); - - // expected to fail due to field epoch date formatters not being set - DocumentMapper currentMapper = mapper("test2", "type", mapping); - try { - currentMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", randomBoolean() ? 
"43" : 43) - .endObject() - .bytes()); - fail("expected parse failure"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("failed to parse [date_field]")); - } - } - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() @@ -505,31 +460,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { assertThat(indexResponse.isCreated(), is(true)); } - public void testThatOlderIndicesAllowNonStrictDates() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - IndexService index = createIndex("test", settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat("epoch_millis||date_optional_time"); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1-1-1T00:00:44.000Z") - .endObject() - .bytes()); - - // also test normal date - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject() - .bytes()); - } - public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() @@ -559,34 +489,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { } } - public void testThatUpgradingAnOlderIndexToStrictDateWorks() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_optional_time").endObject().endObject() - .endObject().endObject().string(); - - Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - createIndex("test", settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat("epoch_millis||date_optional_time"); - - // index doc - client().prepareIndex("test", "type", "1").setSource(XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject()).get(); - - // update mapping - String newMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field") - .field("type", "date") - .field("format", "strict_date_optional_time||epoch_millis") - .endObject().endObject().endObject().endObject().string(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(newMapping).get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - 
assertDateFormat("strict_date_optional_time||epoch_millis"); - } - private void assertDateFormat(String expectedFormat) throws IOException { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").setTypes("type").get(); Map mappingMap = response.getMappings().get("test").get("type").getSourceAsMap(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index af39f45fa8d..13028d08ee0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -653,60 +653,6 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - /** - * Test backward compatibility - */ - public void testBackwardCompatibleOptions() throws Exception { - // backward compatibility testing - Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)).build(); - - // validate - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lat", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lon", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true")); - - // normalize - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lat", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new 
CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lon", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true")); - } - public void testGeoPointMapperMerge() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index 76dba99e05b..af03d3accb1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -53,7 +53,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - assertThat(strategy.getDistErrPct(), equalTo(GeoShapeFieldMapper.Defaults.LEGACY_DISTANCE_ERROR_PCT)); + assertThat(strategy.getDistErrPct(), equalTo(0.025d)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS)); assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java index f8eaf89faab..da56d346ed5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/id/IdMappingTests.java @@ -64,57 +64,6 @@ public class IdMappingTests extends ESSingleNodeTestCase { assertTrue(e.getMessage().contains("No id found")); } } - - public void testIdIndexedBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_id").field("index", "not_analyzed").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get(UidFieldMapper.NAME), notNullValue()); - assertThat(doc.rootDoc().get(IdFieldMapper.NAME), notNullValue()); - } - - public void testIdPathBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_id").field("path", "my_path").endObject() - .endObject().endObject().string(); - Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - // serialize the id mapping - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - builder = docMapper.idFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - String serialized_id_mapping = builder.string(); - - String expected_id_mapping = XContentFactory.jsonBuilder().startObject() - .startObject("_id").field("path", "my_path").endObject() - .endObject().string(); - - assertThat(serialized_id_mapping, equalTo(expected_id_mapping)); - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() - .startObject() - .field("_id", "1") - .endObject() - .bytes()).type("type")); - - // _id is not indexed so we need to check _uid - assertEquals(Uid.createUid("type", "1"), doc.rootDoc().get(UidFieldMapper.NAME)); - } public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java index d2065f439cb..b0476ae7ada 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/index/IndexTypeMapperTests.java @@ -19,59 +19,17 @@ package org.elasticsearch.index.mapper.index; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class IndexTypeMapperTests extends ESSingleNodeTestCase { - private Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - - public void testSimpleIndexMapperEnabledBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - IndexFieldMapper indexMapper = docMapper.indexMapper(); - assertThat(indexMapper.enabled(), equalTo(true)); - - ParsedDocument doc = docMapper.parse("test", 
"type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("_index"), equalTo("test")); - assertThat(doc.rootDoc().get("field"), equalTo("value")); - } - - public void testExplicitDisabledIndexMapperBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class); - assertThat(indexMapper.enabled(), equalTo(false)); - - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("_index"), nullValue()); - assertThat(doc.rootDoc().get("field"), equalTo("value")); - } public void testDefaultDisabledIndexMapper() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -89,42 +47,4 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().get("_index"), nullValue()); assertThat(doc.rootDoc().get("field"), equalTo("value")); } - - public void testThatMergingFieldMappingAllowsDisablingBackcompat() throws Exception { - String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", true).endObject() - .endObject().endObject().string(); - MapperService mapperService = createIndex("test", bwcSettings).mapperService(); - DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(mappingWithIndexEnabled), true, false); - assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(true)); - - String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mappingWithIndexDisabled), false, false); - - assertThat(merged.IndexFieldMapper().enabled(), is(false)); - } - - public void testCustomSettingsBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index") - .field("enabled", true) - .field("store", "yes").endObject() - .endObject().endObject().string(); - - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class); - assertThat(indexMapper.enabled(), equalTo(true)); - assertThat(indexMapper.fieldType().stored(), equalTo(true)); - - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("_index"), equalTo("test")); - assertThat(doc.rootDoc().get("field"), equalTo("value")); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index c15ebae636d..6eb6dd75a68 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -141,45 +139,6 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { assertNull(doc.rootDoc().get("_field_names")); } - public void testPre13Disabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_2_4.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.fieldType().isEnabled()); - } - - public void testDisablingBackcompat() throws Exception { - // before 1.5, disabling happened by setting index:no - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_field_names").field("index", "no").endObject() - .endObject().endObject().string(); - - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.fieldType().isEnabled()); - - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes()); - - assertNull(doc.rootDoc().get("_field_names")); - } - - public void testFieldTypeSettingsBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_field_names").field("store", "yes").endObject() - .endObject().endObject().string(); - - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); - assertTrue(fieldNamesMapper.fieldType().stored()); - } - public void testMergingMappings() throws Exception { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", true).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java index 99c89ff542c..f6bbde47e9d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java @@ -18,10 +18,7 @@ */ package org.elasticsearch.index.mapper.parent; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -46,24 +43,6 @@ public class ParentMappingTests extends ESSingleNodeTestCase { } } - public void testParentSetInDocBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_parent").field("type", "p_type").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() - .startObject() - .field("_parent", "1122") - .field("x_field", "x_value") - .endObject() - .bytes()).type("type").id("1")); - - assertEquals("1122", doc.parent()); - assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent")); - } - public void testParentSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_parent").field("type", "p_type").endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java index 1f6ee7cce3b..a658948f022 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java @@ -19,31 +19,15 @@ package org.elasticsearch.index.mapper.routing; -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Map; - import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; public class RoutingTypeMapperTests extends ESSingleNodeTestCase { @@ -62,72 +46,6 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().get("field"), equalTo("value")); } - public void 
testFieldTypeSettingsBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing") - .field("store", "no") - .field("index", "no") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false)); - assertEquals(IndexOptions.NONE, docMapper.routingFieldMapper().fieldType().indexOptions()); - } - - public void testFieldTypeSettingsSerializationBackcompat() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing").field("store", "no").field("index", "no").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping)); - - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - enabledMapper.routingFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - assertThat(serializedMap, hasKey("_routing")); - assertThat(serializedMap.get("_routing"), instanceOf(Map.class)); - Map routingConfiguration = (Map) serializedMap.get("_routing"); - assertThat(routingConfiguration, hasKey("store")); - assertThat(routingConfiguration.get("store").toString(), is("false")); - assertThat(routingConfiguration, hasKey("index")); - assertThat(routingConfiguration.get("index").toString(), is("no")); - } - - public void testPathBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing").field("path", "custom_routing").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_routing", "routing_value").endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - assertEquals(request.routing(), "routing_value"); - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject(); - MappingMetaData mappingMetaData = 
new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - // _routing in a document never worked, so backcompat is ignoring the field - assertNull(request.routing()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_routing")); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 218fc442224..7bd4d9a78c3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -540,24 +540,4 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { } } - /** - * Test backward compatibility - */ - public void testBackwardCompatible() throws Exception { - - Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)).build(); - - DocumentMapperParser parser = createIndex("backward_compatible_index", settings).mapperService().documentMapperParser(); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_offset_gap", 10) - .endObject().endObject().endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - - assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"position_increment_gap\":10")); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index c6a8a2fca44..51ef9ff0024 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -32,11 +32,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MappedFieldType; @@ -49,20 +46,13 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.Version.V_1_5_0; -import static org.elasticsearch.Version.V_2_0_0_beta1; import 
static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -71,7 +61,6 @@ import static org.hamcrest.Matchers.notNullValue; /** */ public class TimestampMappingTests extends ESSingleNodeTestCase { - Settings BWC_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); @@ -104,41 +93,23 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } public void testDefaultValues() throws Exception { - for (Version version : Arrays.asList(V_1_5_0, V_2_0_0_beta1, randomVersion(random()))) { - for (String mapping : Arrays.asList( - XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(), - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) { - DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); - assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH)); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - String expectedFormat = version.onOrAfter(Version.V_2_0_0_beta1) ? 
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT : - TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format(); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(expectedFormat)); - assertAcked(client().admin().indices().prepareDelete("test").execute().get()); - } + Version version; + do { + version = randomVersion(random()); + } while (version.before(Version.V_2_0_0_beta1)); + for (String mapping : Arrays.asList( + XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(), + XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) { + DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); + assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); + assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); + assertAcked(client().admin().indices().prepareDelete("test").execute().get()); } } - public void testBackcompatSetValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes").field("store", "no").field("index", "no") - .field("path", "timestamp").field("format", "year") - .field("doc_values", true) - .endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(true)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false)); - assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions()); - assertThat(docMapper.timestampFieldMapper().path(), equalTo("timestamp")); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo("year")); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(true)); - } - public void testThatDisablingDuringMergeIsWorking() throws Exception { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).endObject() @@ -155,55 +126,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); } - // issue 3174 - public void testThatSerializationWorksCorrectlyForIndexField() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("store", "yes").field("index", "no").endObject() - .endObject().endObject().string(); - DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new 
CompressedXContent(enabledMapping)); - - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - enabledMapper.timestampFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - assertThat(serializedMap, hasKey("_timestamp")); - assertThat(serializedMap.get("_timestamp"), instanceOf(Map.class)); - Map timestampConfiguration = (Map) serializedMap.get("_timestamp"); - assertThat(timestampConfiguration, hasKey("index")); - assertThat(timestampConfiguration.get("index").toString(), is("no")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - try { - request.process(metaData, mappingMetaData, true, "test"); - fail(); - } catch (TimestampParsingException e) { - assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping")); - } - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampDefaultValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -230,32 +152,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(delay, lessThanOrEqualTo(60000L)); } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingDefaultToEpochValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", "1970-01-01") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void 
testTimestampMissingDefaultToEpochValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -281,35 +177,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingNowDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", "now") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampMissingNowDefaultValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -355,34 +222,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - try { - request.process(metaData, mappingMetaData, true, "test"); - fail("we should reject the mapping with a TimestampParsingException: timestamp is required by mapping"); - } catch (TimestampParsingException e) { - assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping")); - } - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -448,10 +287,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { public void testDefaultTimestampStream() 
throws IOException { // Testing null value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -465,10 +304,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // Testing "now" value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -482,10 +321,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // Testing "ignore_missing" value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -498,25 +337,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } } - public void testMergingFielddataLoadingWorks() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - MapperService mapperService = createIndex("test", indexSettings).mapperService(); - - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() - .endObject().endObject().string(); - - docMapper = mapperService.merge("type", new CompressedXContent(mapping), false, false); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), 
equalTo(MappedFieldType.Loading.EAGER)); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); - } - public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") @@ -530,126 +350,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(docMapper.mappingSource().string(), equalTo(mapping)); } - public void testBackcompatParsingTwiceDoesNotChangeTokenizeValue() throws Exception { - String[] index_options = {"no", "analyzed", "not_analyzed"}; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("index", index_options[randomInt(2)]) - .field("store", true) - .field("path", "foo") - .field("default", "1970-01-01") - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject() - .startObject("properties") - .endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized(); - docMapper = parser.parse("type", docMapper.mappingSource()); - assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized())); - } - - public void testMergingConflicts() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("store", "yes") - .field("index", "analyzed") - .field("path", "foo") - .field("default", "1970-01-01") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - MapperService mapperService = createIndex("test", indexSettings).mapperService(); - - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", false) - .startObject("fielddata").field("format", "array").endObject() - .field("store", "no") - .field("index", "no") - .field("path", "foo") - .field("default", "1970-01-01") - .endObject() - .endObject().endObject().string(); - - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values")); - assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values")); - } - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", false) - .startObject("fielddata").field("format", "array").endObject() - .field("store", "yes") - .field("index", "analyzed") - .field("path", "bar") - .field("default", "1970-01-02") - .endObject() - .endObject().endObject().string(); - - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), 
containsString("Cannot update default in _timestamp value")); - assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value")); - } - - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - assertTrue(docMapper.timestampFieldMapper().enabled()); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("store", "yes") - .field("index", "analyzed") - .field("path", "bar") - .field("default", "1970-01-02") - .endObject() - .endObject().endObject().string(); - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02")); - assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar")); - } - } - - public void testBackcompatMergingConflictsForIndexValues() throws Exception { - List indexValues = new ArrayList<>(); - indexValues.add("analyzed"); - indexValues.add("no"); - indexValues.add("not_analyzed"); - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("index", indexValues.remove(randomInt(2))) - .endObject() - .endObject().endObject().string(); - MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService(); - - mapperService.merge("type", new CompressedXContent(mapping), true, false); - mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("index", indexValues.remove(randomInt(1))) - .endObject() - .endObject().endObject().string(); - - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values")); - } - } - /** * Test for issue #9223 */ @@ -665,31 +365,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { new MappingMetaData(new CompressedXContent(mapping)); } - public void testBackcompatMergePaths() throws Exception { - String[] possiblePathValues = {"some_path", "anotherPath", null}; - MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService(); - XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp"); - String path1 = possiblePathValues[randomInt(2)]; - if (path1!=null) { - mapping1.field("path", path1); - } - mapping1.endObject() - .endObject().endObject(); - XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp"); - String path2 = possiblePathValues[randomInt(2)]; - if (path2!=null) { - mapping2.field("path", path2); - } - mapping2.endObject() - .endObject().endObject(); - - assertConflict(mapperService, "type", mapping1.string(), mapping2.string(), (path1 == path2 ? 
null : "Cannot update path in _timestamp value")); - } - void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { mapperService.merge("type", new CompressedXContent(mapping1), true, false); try { @@ -701,93 +376,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } } - public void testBackcompatDocValuesSerialization() throws Exception { - // default - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // just format specified - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly enabled - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", true) - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly disabled - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", false) - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly enabled, with format - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", true) - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly disabled, with format - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", false) - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - } - - void assertDocValuesSerialization(String mapping) throws Exception { - DocumentMapperParser parser = createIndex("test_doc_values", BWC_SETTINGS).mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues(); - docMapper = parser.parse("type", docMapper.mappingSource()); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues)); - assertAcked(client().admin().indices().prepareDelete("test_doc_values")); - } - - public void testBackcompatPath() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - - 
assertThat(request.timestamp(), is("1")); - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - - // _timestamp in a document never worked, so backcompat is ignoring the field - assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY"), Version.V_1_4_2), request.timestamp()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_timestamp")); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index 10eeecf6749..fa27e9bcfb7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -86,19 +86,6 @@ public class TTLMappingTests extends ESSingleNodeTestCase { assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions())); } - public void testSetValuesBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes").field("store", "no") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(true)); - assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(true)); // store was never serialized, so it was always lost - - } - public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() @@ -216,23 +203,6 @@ public class TTLMappingTests extends ESSingleNodeTestCase { assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", true).endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_ttl", "2d").endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - // _ttl in a document never worked, so backcompat is ignoring the field - assertNull(request.ttl()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_ttl")); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", true).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 9510c6749eb..a53295d7fea 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -247,23 +247,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } } - public void testIndexFieldParsingBackcompat() throws IOException { - IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); - XContentBuilder indexMapping = XContentFactory.jsonBuilder(); - boolean enabled = randomBoolean(); - indexMapping.startObject() - .startObject("type") - .startObject("_index") - .field("enabled", enabled) - .endObject() - .endObject() - .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); - assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); - } - public void testTimestampParsing() throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); XContentBuilder indexMapping = XContentFactory.jsonBuilder(); @@ -272,10 +255,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .startObject("type") .startObject("_timestamp") .field("enabled", enabled) - .field("store", true) - .startObject("fielddata") - .field("format", "doc_values") - .endObject() .endObject() .endObject() .endObject(); diff --git a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index 7e59253ed97..3bbf7146ae4 100644 --- a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -217,76 +217,6 @@ public class SimpleRoutingIT extends ESIntegTestCase { } } - public void testRequiredRoutingWithPathMapping() throws Exception { - 
client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject().startObject("properties") - .startObject("routing_field").field("type", "string").field("index", randomBoolean() ? "no" : "not_analyzed").field("doc_values", randomBoolean() ? "yes" : "no").endObject().endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - .execute().actionGet(); - ensureGreen(); - - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet(); - - logger.info("--> check failure with different routing"); - try { - client().prepareIndex(indexOrAlias(), "type1", "1").setRouting("1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet(); - fail(); - } catch (ElasticsearchException e) { - assertThat(e.unwrapCause(), instanceOf(MapperParsingException.class)); - } - - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } - - public void testRequiredRoutingWithPathMappingBulk() throws Exception { - client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - .execute().actionGet(); - ensureGreen(); - - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareBulk().add( - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0")).execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } - public void testRequiredRoutingBulk() throws Exception { client().admin().indices().prepareCreate("test") .addAlias(new Alias("alias")) @@ -317,37 +247,6 @@ public class SimpleRoutingIT extends ESIntegTestCase { } } - public void testRequiredRoutingWithPathNumericType() throws Exception { - - 
client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - .execute().actionGet(); - ensureGreen(); - - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", 0).execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } - public void testRequiredRoutingMapping_variousAPIs() throws Exception { client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 3a3e44828c1..ad9ab044ad3 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -476,39 +476,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); } - public void testOmitTermFreqsAndPositions() throws Exception { - cluster().wipeTemplates(); // no randomized template for this test -- we are testing bwc compat and set version explicitly this might cause failures if an unsupported feature - // is added randomly via an index template. - Version version = Version.CURRENT; - int iters = scaledRandomIntBetween(10, 20); - for (int i = 0; i < iters; i++) { - try { - // backwards compat test! 
- assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=string,omit_term_freq_and_positions=true") - .setSettings(settings(version).put(SETTING_NUMBER_OF_SHARDS, 1))); - assertThat(version.onOrAfter(Version.V_1_0_0_RC2), equalTo(false)); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")); - - - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); - assertHitCount(searchResponse, 1l); - try { - client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get(); - fail("SearchPhaseExecutionException should have been thrown"); - } catch (SearchPhaseExecutionException e) { - assertTrue(e.toString().contains("IllegalStateException[field \"field1\" was indexed without position data; cannot run PhraseQuery")); - } - cluster().wipeIndices("test"); - } catch (MapperParsingException ex) { - assertThat(version.toString(), version.onOrAfter(Version.V_1_0_0_RC2), equalTo(true)); - assertThat(ex.getCause().getMessage(), equalTo("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead")); - } - version = randomVersion(random()); - } - } - public void testQueryStringAnalyzedWildcard() throws Exception { createIndex("test"); @@ -635,24 +602,8 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 0l); } - public void testTypeFilterTypeIndexedTests() throws Exception { - typeFilterTests("not_analyzed"); - } - - public void testTypeFilterTypeNotIndexedTests() throws Exception { - typeFilterTests("no"); - } - - private void typeFilterTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .addMapping("type2", jsonBuilder().startObject().startObject("type2") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .setUpdateAllTypes(true)); + public void testTypeFilter() throws Exception { + assertAcked(prepareCreate("test")); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), client().prepareIndex("test", "type2", "1").setSource("field1", "value1"), client().prepareIndex("test", "type1", "2").setSource("field1", "value1"), @@ -669,19 +620,7 @@ public class SearchQueryIT extends ESIntegTestCase { } public void testIdsQueryTestsIdIndexed() throws Exception { - idsQueryTests("not_analyzed"); - } - - public void testIdsQueryTestsIdNotIndexed() throws Exception { - idsQueryTests("no"); - } - - private void idsQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(client().admin().indices().prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_id").field("index", index).endObject() - .endObject().endObject())); + assertAcked(client().admin().indices().prepareCreate("test")); 
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), @@ -714,27 +653,13 @@ public class SearchQueryIT extends ESIntegTestCase { assertSearchHits(searchResponse, "1", "3"); } - public void testTermIndexQueryIndexed() throws Exception { - termIndexQueryTests("not_analyzed"); - } - - public void testTermIndexQueryNotIndexed() throws Exception { - termIndexQueryTests("no"); - } - - private void termIndexQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); + public void testTermIndexQuery() throws Exception { String[] indexNames = { "test1", "test2" }; for (String indexName : indexNames) { assertAcked(client() .admin() .indices() - .prepareCreate(indexName) - .setSettings(indexSettings) - .addMapping( - "type1", - jsonBuilder().startObject().startObject("type1").startObject("_index").field("index", index).endObject() - .endObject().endObject())); + .prepareCreate(indexName)); indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1")); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java index 2059b069ab1..f05938b4c98 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.messy.tests; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -33,10 +32,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; @@ -45,7 +42,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.test.ESIntegTestCase; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -53,7 +49,6 @@ import java.util.concurrent.CyclicBarrier; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; @@ -554,86 +549,6 @@ public class BulkTests extends ESIntegTestCase { assertThat(successes, equalTo(1)); } - // issue 4745 - public void testPreParsingSourceDueToMappingShouldNotBreakCompleteBulkRequest() throws Exception { - 
XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("path", "last_modified") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - - String brokenBuildRequestData = "{\"index\": {\"_id\": \"1\"}}\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": {\"_id\": \"2\"}}\n" + - "{\"name\": \"Good\", \"last_modified\" : \"2013-04-05\"}\n"; - - BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(get("test", "type", "2")); - } - - // issue 4745 - public void testPreParsingSourceDueToRoutingShouldNotBreakCompleteBulkRequest() throws Exception { - XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_routing") - .field("required", true) - .field("path", "my_routing") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - ensureYellow("test"); - - String brokenBuildRequestData = "{\"index\": {} }\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": { \"_id\" : \"24000\" } }\n" + - "{\"name\": \"Good\", \"my_routing\" : \"48000\"}\n"; - - BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(client().prepareGet("test", "type", "24000").setRouting("48000").get()); - } - - - // issue 4745 - public void testPreParsingSourceDueToIdShouldNotBreakCompleteBulkRequest() throws Exception { - XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_id") - .field("path", "my_id") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - ensureYellow("test"); - - String brokenBuildRequestData = "{\"index\": {} }\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": {} }\n" + - "{\"name\": \"Good\", \"my_id\" : \"48\"}\n"; - - BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(get("test", "type", "48")); - } - // issue 4987 public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() { int bulkEntryCount = randomIntBetween(10, 50); diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 3fa9f1ffe1e..baeba9f4bbd 100644 --- 
a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
+++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
@@ -161,16 +161,13 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

         // all are defaults, no need to write it at all
-        if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && (indexCreatedBefore2x == false || fieldType().stored() == false)) {
+        if (!includeDefaults && enabledState == Defaults.ENABLED_STATE) {
             return builder;
         }
         builder.startObject(contentType());
         if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
             builder.field("enabled", enabledState.enabled);
         }
-        if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() == true)) {
-            builder.field("store", fieldType().stored());
-        }
         builder.endObject();
         return builder;
     }

From 25d23091e29af0048ac8d69739b29bfd74a63c80 Mon Sep 17 00:00:00 2001
From: socurites
Date: Thu, 24 Dec 2015 12:31:55 +0900
Subject: [PATCH 244/322] Edge NGram: "side" setting was deprecated

Edge NGram: "side" setting was deprecated
---
 .../analysis/tokenfilters/edgengram-tokenfilter.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc
index 3ba0edeb8ef..be37d24f7dd 100644
--- a/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc
@@ -11,6 +11,6 @@ filter type:
 |Setting |Description
 |`min_gram` |Defaults to `1`.
 |`max_gram` |Defaults to `2`.
-|`side` |Either `front` or `back`. Defaults to `front`.
+|`side` |Deprecated. Either `front` or `back`. Defaults to `front`.
 |======================================================

From 485915bbe780949a03a1bff0dcde6a81a39de3bb Mon Sep 17 00:00:00 2001
From: socurites
Date: Thu, 24 Dec 2015 11:25:26 +0900
Subject: [PATCH 245/322] comma(,) was duplicated; deleted it.
---
 docs/reference/analysis/charfilters.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/analysis/charfilters.asciidoc b/docs/reference/analysis/charfilters.asciidoc
index a40cfffc054..c9f5805284c 100644
--- a/docs/reference/analysis/charfilters.asciidoc
+++ b/docs/reference/analysis/charfilters.asciidoc
@@ -3,7 +3,7 @@
 Character filters are used to preprocess the string of
 characters before it is passed to the <>.

-A character filter may be used to strip out HTML markup, , or to convert
+A character filter may be used to strip out HTML markup, or to convert
 `"&"` characters to the word `"and"`.

 Elasticsearch has built in characters filters which can be

From 23f736a0c185d5002f3e9567c21f6f5a0a9467a6 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Thu, 24 Dec 2015 15:28:09 +0100
Subject: [PATCH 246/322] Remove ParseContext.ignoredValue.

This API is unused.
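For context, a minimal sketch of the write-only pattern this commit removes (the method and field names are taken from the diff below; the map's type parameters are an assumption, since generics are not shown here):

    // in ParseContext: per-document bookkeeping that nothing ever read back
    private Map<String, String> ignoredValues = new HashMap<>();   // assumed Map<String, String>

    // mappers recorded values that produced no Lucene fields ...
    if (fields.isEmpty()) {
        context.ignoredValue(fieldType().name(), valueAndBoost.value());
    }

    // ... but the matching getter had no callers, so the map was pure overhead
    String ignored = context.ignoredValue(fieldType().name());

With the getter gone, parseCreateField implementations only need the positive case, which is why the RoutingFieldMapper change below inverts its guard instead of recording the skipped routing value.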
--- .../index/mapper/ParseContext.java | 29 ------------------- .../index/mapper/core/StringFieldMapper.java | 3 -- .../mapper/core/TokenCountFieldMapper.java | 3 -- .../mapper/internal/RoutingFieldMapper.java | 6 ++-- .../mapper/internal/TimestampFieldMapper.java | 3 -- 5 files changed, 2 insertions(+), 42 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index 0a88e29c8d6..3c12f51a7f7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -34,10 +34,8 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.object.RootObjectMapper; import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; import java.util.List; -import java.util.Map; /** * @@ -288,16 +286,6 @@ public abstract class ParseContext { return in.id(); } - @Override - public void ignoredValue(String indexName, String value) { - in.ignoredValue(indexName, value); - } - - @Override - public String ignoredValue(String indexName) { - return in.ignoredValue(indexName); - } - @Override public void id(String id) { in.id(id); @@ -390,8 +378,6 @@ public abstract class ParseContext { private StringBuilder stringBuilder = new StringBuilder(); - private Map ignoredValues = new HashMap<>(); - private AllEntries allEntries = new AllEntries(); private float docBoost = 1.0f; @@ -421,7 +407,6 @@ public abstract class ParseContext { this.source = source == null ? null : sourceToParse.source(); this.path.reset(); this.allEntries = new AllEntries(); - this.ignoredValues.clear(); this.docBoost = 1.0f; this.dynamicMappingsUpdate = null; } @@ -523,16 +508,6 @@ public abstract class ParseContext { return id; } - @Override - public void ignoredValue(String indexName, String value) { - ignoredValues.put(indexName, value); - } - - @Override - public String ignoredValue(String indexName) { - return ignoredValues.get(indexName); - } - /** * Really, just the id mapper should set this. */ @@ -710,10 +685,6 @@ public abstract class ParseContext { public abstract String id(); - public abstract void ignoredValue(String indexName, String value); - - public abstract String ignoredValue(String indexName); - /** * Really, just the id mapper should set this. 
*/ diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index f8f9cb25ba1..3885690c7ec 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -326,9 +326,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc if (fieldType().hasDocValues()) { fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); } - if (fields.isEmpty()) { - context.ignoredValue(fieldType().name(), valueAndBoost.value()); - } } /** diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index 1fd1debe60c..85df5ea3d3b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -147,9 +147,6 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { } addIntegerFields(context, fields, count, valueAndBoost.boost()); } - if (fields.isEmpty()) { - context.ignoredValue(fieldType().name(), valueAndBoost.value()); - } } /** diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 4fb410dbdc8..5fd1d2f49a3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -205,11 +205,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper { if (context.sourceToParse().routing() != null) { String routing = context.sourceToParse().routing(); if (routing != null) { - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) { - context.ignoredValue(fieldType().name(), routing); - return; + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + fields.add(new Field(fieldType().name(), routing, fieldType())); } - fields.add(new Field(fieldType().name(), routing, fieldType())); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 3771747bf66..e8a37a49e3e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -312,9 +312,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { protected void parseCreateField(ParseContext context, List fields) throws IOException { if (enabledState.enabled) { long timestamp = context.sourceToParse().timestamp(); - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) { - context.ignoredValue(fieldType().name(), String.valueOf(timestamp)); - } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { fields.add(new LongFieldMapper.CustomLongNumericField(timestamp, fieldType())); } From 0a816cd3438992497c0a3ec62c928b3b0c985a2c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 28 Dec 2015 11:58:43 +0100 Subject: [PATCH 247/322] Add tests for equals and hashCode and fix 
FiltersFunctionScoreQuery equals and hashCode impls Relates to #15676 --- .../function/FiltersFunctionScoreQuery.java | 7 +- .../functionscore/FunctionScoreTests.java | 144 +++++++++++++++++- 2 files changed, 142 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 3486690e270..a7b7300c9b6 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -369,12 +369,13 @@ public class FiltersFunctionScoreQuery extends Query { } FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o; return Objects.equals(this.subQuery, other.subQuery) && this.maxBoost == other.maxBoost && - Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && - Arrays.equals(this.filterFunctions, other.filterFunctions); + Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && + Objects.equals(this.scoreMode, other.scoreMode) && + Arrays.equals(this.filterFunctions, other.filterFunctions); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, filterFunctions); + return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions)); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 979c98e53c1..348a7fd67e5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -358,11 +358,11 @@ public class FunctionScoreTests extends ESTestCase { // now test all together functionExplanation = getFiltersFunctionScoreExplanation(searcher - , RANDOM_SCORE_FUNCTION - , FIELD_VALUE_FACTOR_FUNCTION - , GAUSS_DECAY_FUNCTION - , EXP_DECAY_FUNCTION - , LIN_DECAY_FUNCTION + , RANDOM_SCORE_FUNCTION + , FIELD_VALUE_FACTOR_FUNCTION + , GAUSS_DECAY_FUNCTION + , EXP_DECAY_FUNCTION + , LIN_DECAY_FUNCTION ); checkFiltersFunctionScoreExplanation(functionExplanation, "random score function (seed: 0)", 0); @@ -398,7 +398,7 @@ public class FunctionScoreTests extends ESTestCase { FiltersFunctionScoreQuery.FilterFunction[] filterFunctions = new FiltersFunctionScoreQuery.FilterFunction[scoreFunctions.length]; for (int i = 0; i < scoreFunctions.length; i++) { filterFunctions[i] = new FiltersFunctionScoreQuery.FilterFunction( - new TermQuery(TERM), scoreFunctions[i]); + new TermQuery(TERM), scoreFunctions[i]); } return new FiltersFunctionScoreQuery(new TermQuery(TERM), scoreMode, filterFunctions, Float.MAX_VALUE, Float.MAX_VALUE * -1, combineFunction); } @@ -610,4 +610,136 @@ public class FunctionScoreTests extends ESTestCase { assertNotNull(scorer.twoPhaseIterator()); } } + + public void testFunctionScoreHashCodeAndEquals() { + Float minScore = randomBoolean() ? null : 1.0f; + CombineFunction combineFunction = randomFrom(CombineFunction.values()); + float maxBoost = randomBoolean() ? Float.POSITIVE_INFINITY : randomFloat(); + ScoreFunction function = randomBoolean() ? 
null : new ScoreFunction(combineFunction) {
+            @Override
+            public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
+                return null;
+            }
+
+            @Override
+            public boolean needsScores() {
+                return false;
+            }
+
+            @Override
+            protected boolean doEquals(ScoreFunction other) {
+                return other == this;
+            }
+        };
+
+        FunctionScoreQuery q = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost);
+        FunctionScoreQuery q1 = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost);
+        assertEquals(q, q);
+        assertEquals(q.hashCode(), q.hashCode());
+        assertEquals(q, q1);
+        assertEquals(q.hashCode(), q1.hashCode());
+
+        FunctionScoreQuery diffQuery = new FunctionScoreQuery(new TermQuery(new Term("foo", "baz")), function, minScore, combineFunction, maxBoost);
+        FunctionScoreQuery diffMinScore = new FunctionScoreQuery(q.getSubQuery(), function, minScore == null ? 1.0f : null, combineFunction, maxBoost);
+        ScoreFunction otherFunction = function == null ? new ScoreFunction(combineFunction) {
+            @Override
+            public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
+                return null;
+            }
+
+            @Override
+            public boolean needsScores() {
+                return false;
+            }
+
+            @Override
+            protected boolean doEquals(ScoreFunction other) {
+                return other == this;
+            }
+
+        } : null;
+        FunctionScoreQuery diffFunction = new FunctionScoreQuery(q.getSubQuery(), otherFunction, minScore, combineFunction, maxBoost);
+        FunctionScoreQuery diffMaxBoost = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost == 1.0f ? 0.9f : 1.0f);
+        q1.setBoost(3.0f);
+        FunctionScoreQuery[] queries = new FunctionScoreQuery[] {
+            diffFunction,
+            diffMinScore,
+            diffQuery,
+            q,
+            q1,
+            diffMaxBoost
+        };
+        final int numIters = randomIntBetween(20, 100);
+        for (int i = 0; i < numIters; i++) {
+            FunctionScoreQuery left = randomFrom(queries);
+            FunctionScoreQuery right = randomFrom(queries);
+            if (left == right) {
+                assertEquals(left, right);
+                assertEquals(left.hashCode(), right.hashCode());
+            } else {
+                assertNotEquals(left + " == " + right, left, right);
+            }
+        }
+
+    }
+
+    public void testFilterFunctionScoreHashCodeAndEquals() {
+        ScoreMode mode = randomFrom(ScoreMode.values());
+        CombineFunction combineFunction = randomFrom(CombineFunction.values());
+        ScoreFunction scoreFunction = new ScoreFunction(combineFunction) {
+            @Override
+            public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
+                return null;
+            }
+
+            @Override
+            public boolean needsScores() {
+                return false;
+            }
+
+            @Override
+            protected boolean doEquals(ScoreFunction other) {
+                return other == this;
+            }
+        };
+        Float minScore = randomBoolean() ? null : 1.0f;
+        Float maxBoost = randomBoolean() ?
Float.POSITIVE_INFINITY : randomFloat(); + + FilterFunction function = new FilterFunction(new TermQuery(new Term("filter", "query")), scoreFunction); + FiltersFunctionScoreQuery q = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery q1 = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + assertEquals(q, q); + assertEquals(q.hashCode(), q.hashCode()); + assertEquals(q, q1); + assertEquals(q.hashCode(), q1.hashCode()); + FiltersFunctionScoreQuery diffCombineFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction == CombineFunction.AVG ? CombineFunction.MAX : CombineFunction.AVG); + FiltersFunctionScoreQuery diffQuery = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "baz")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery diffMode = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode == ScoreMode.AVG ? ScoreMode.FIRST : ScoreMode.AVG, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery diffMaxBoost = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost == 1.0f ? 0.9f : 1.0f, minScore, combineFunction); + FiltersFunctionScoreQuery diffMinScore = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore == null ? 0.9f : null, combineFunction); + FilterFunction otherFunc = new FilterFunction(new TermQuery(new Term("filter", "other_query")), scoreFunction); + FiltersFunctionScoreQuery diffFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, randomBoolean() ? 
new FilterFunction[] {function, otherFunc} : new FilterFunction[] {otherFunc}, maxBoost, minScore, combineFunction);
+        q1.setBoost(3.0f);
+
+        FiltersFunctionScoreQuery[] queries = new FiltersFunctionScoreQuery[] {
+            diffQuery,
+            diffMaxBoost,
+            diffMinScore,
+            diffMode,
+            diffFunc,
+            q,
+            q1,
+            diffCombineFunc
+        };
+        final int numIters = randomIntBetween(20, 100);
+        for (int i = 0; i < numIters; i++) {
+            FiltersFunctionScoreQuery left = randomFrom(queries);
+            FiltersFunctionScoreQuery right = randomFrom(queries);
+            if (left == right) {
+                assertEquals(left, right);
+                assertEquals(left.hashCode(), right.hashCode());
+            } else {
+                assertNotEquals(left + " == " + right, left, right);
+            }
+        }
+    }
 }

From e8daad66d1d7d03f967317028d1640f5e6eb69a0 Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Mon, 28 Dec 2015 12:52:38 +0100
Subject: [PATCH 248/322] Apply more feedback from @dakrone
---
 .../cluster/routing/RoutingNodes.java         | 20 -------------------
 .../recovery/RecoverySourceHandler.java       | 13 +++++++-----
 2 files changed, 8 insertions(+), 25 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
index e98eb9d4a47..3a2567e3f46 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
@@ -857,28 +857,8 @@ public class RoutingNodes implements Iterable {
                     }
                 }
             }
-//            if (outgoing != value.outgoing) {
-//                incoming = 0;
-//                outgoing = 0;
-//                for (ShardRouting routing : routingNode) {
-//                    if (routing.initializing()) {
-//                        incoming++;
-//                    } else if (routing.relocating()) {
-//                        outgoing++;
-//                    }
-//                    if (routing.primary() && (routing.initializing() && routing.relocatingNodeId() != null) == false) { // we don't count the initialization end of the primary relocation
-//                        List shardRoutings = routingNodes.assignedShards.get(routing.shardId());
-//                        for (ShardRouting assigned : shardRoutings) {
-//                            if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) {
-//                                outgoing++;
-//                            }
-//                        }
-//                    }
-//                }
-//            }
             assert incoming == value.incoming : incoming + " != " + value.incoming;
             assert outgoing == value.outgoing : outgoing + " != " + value.outgoing + " node: " + routingNode;
-
         }

diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
index 10ab9956df4..94c78efccd8 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
@@ -66,6 +66,12 @@ import java.util.stream.StreamSupport;
  * RecoverySourceHandler handles the three phases of shard recovery, which is
  * everything relating to copying the segment files as well as sending translog
  * operations across the wire once the segments have been copied.
+ *
+ * Note: There is always one source handler per recovery that handles all the
+ * file and translog transfer. This handler is completely isolated from other recoveries
+ * while the {@link RateLimiter} passed via {@link RecoverySettings} is shared across recoveries
+ * originating from this node to throttle the number of bytes sent during file transfer. The transaction log
+ * phase bypasses the rate limiter entirely.
*/ public class RecoverySourceHandler { @@ -455,10 +461,6 @@ public class RecoverySourceHandler { // index docs to replicas while the index files are recovered // the lock can potentially be removed, in which case, it might // make sense to re-enable throttling in this phase -// if (recoverySettings.rateLimiter() != null) { -// recoverySettings.rateLimiter().pause(size); -// } - cancellableThreads.execute(() -> { final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); @@ -551,6 +553,7 @@ public class RecoverySourceHandler { cancellableThreads.execute(() -> { // Pause using the rate limiter, if desired, to throttle the recovery final long throttleTimeInNanos; + // always fetch the ratelimiter - it might be updated in real-time on the recovery settings final RateLimiter rl = recoverySettings.rateLimiter(); if (rl != null) { long bytes = bytesSinceLastPause.addAndGet(content.length()); @@ -592,7 +595,7 @@ public class RecoverySourceHandler { for (int i = 0; i < files.length; i++) { final StoreFileMetaData md = files[i]; try (final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READONCE)) { - // it's fine that we are only having the indexInput int he try/with block. The copy methods handles + // it's fine that we are only having the indexInput in the try/with block. The copy methods handles // exceptions during close correctly and doesn't hide the original exception. Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStreamFactory.apply(md)); } catch (Throwable t) { From 35cc749c9af8f0546b315a4fa0dfe5870b0c4cd8 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 28 Dec 2015 08:07:37 -0500 Subject: [PATCH 249/322] Correctly release threads from starting gate in o.e.c.c.CacheTests --- .../common/cache/CacheTests.java | 194 ++++++++++-------- 1 file changed, 114 insertions(+), 80 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java index d0b7d998dd5..0985bc4b88e 100644 --- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java +++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java @@ -494,33 +494,41 @@ public class CacheTests extends ESTestCase { public void testComputeIfAbsentCallsOnce() throws InterruptedException { int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); - List threads = new ArrayList<>(); AtomicReferenceArray flags = new AtomicReferenceArray(numberOfEntries); for (int j = 0; j < numberOfEntries; j++) { flags.set(j, false); } - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { + try { try { - cache.computeIfAbsent(j, key -> { - assertTrue(flags.compareAndSet(key, false, true)); - return Integer.toString(key); - }); - } catch (ExecutionException e) { - throw new RuntimeException(e); + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + for (int j = 0; j < numberOfEntries; j++) { + try { + cache.computeIfAbsent(j, key -> { + 
assertTrue(flags.compareAndSet(key, false, true)); + return Integer.toString(key); + }); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); } public void testComputeIfAbsentThrowsExceptionIfLoaderReturnsANullValue() { @@ -560,30 +568,39 @@ public class CacheTests extends ESTestCase { int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); + CountDownLatch startGate = new CountDownLatch(1); CountDownLatch deadlockLatch = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); List threads = new ArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - Random random = new Random(random().nextLong()); - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { - Key key = new Key(random.nextInt(numberOfEntries)); + try { try { - cache.computeIfAbsent(key, k -> { - if (k.key == 0) { - return 0; - } else { - Integer value = cache.get(new Key(k.key / 2)); - return value != null ? value : 0; - } - }); - } catch (ExecutionException e) { - fail(e.getMessage()); + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Key key = new Key(random.nextInt(numberOfEntries)); + try { + cache.computeIfAbsent(key, k -> { + if (k.key == 0) { + return 0; + } else { + Integer value = cache.get(new Key(k.key / 2)); + return value != null ? 
value : 0; + } + }); + } catch (ExecutionException e) { + fail(e.getMessage()); + } + } + } finally { + // successfully avoided deadlock, release the main thread + deadlockLatch.countDown(); } - // successfully avoided deadlock, release the main thread - deadlockLatch.countDown(); }); threads.add(thread); thread.start(); @@ -614,7 +631,7 @@ public class CacheTests extends ESTestCase { }, 1, 1, TimeUnit.SECONDS); // everything is setup, release the hounds - latch.countDown(); + startGate.countDown(); // wait for either deadlock to be detected or the threads to terminate deadlockLatch.await(); @@ -628,49 +645,57 @@ public class CacheTests extends ESTestCase { public void testCachePollution() throws InterruptedException { int numberOfThreads = randomIntBetween(2, 32); final Cache cache = CacheBuilder.builder().build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); - List threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - latch.countDown(); - Random random = new Random(random().nextLong()); - for (int j = 0; j < numberOfEntries; j++) { - Integer key = random.nextInt(numberOfEntries); - boolean first; - boolean second; - do { - first = random.nextBoolean(); - second = random.nextBoolean(); - } while (first && second); - if (first) { - try { - cache.computeIfAbsent(key, k -> { - if (random.nextBoolean()) { - return Integer.toString(k); - } else { - throw new Exception("testCachePollution"); - } - }); - } catch (ExecutionException e) { - assertNotNull(e.getCause()); - assertThat(e.getCause(), instanceOf(Exception.class)); - assertEquals(e.getCause().getMessage(), "testCachePollution"); - } - } else if (second) { - cache.invalidate(key); - } else { - cache.get(key); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Integer key = random.nextInt(numberOfEntries); + boolean first; + boolean second; + do { + first = random.nextBoolean(); + second = random.nextBoolean(); + } while (first && second); + if (first) { + try { + cache.computeIfAbsent(key, k -> { + if (random.nextBoolean()) { + return Integer.toString(k); + } else { + throw new Exception("testCachePollution"); + } + }); + } catch (ExecutionException e) { + assertNotNull(e.getCause()); + assertThat(e.getCause(), instanceOf(Exception.class)); + assertEquals(e.getCause().getMessage(), "testCachePollution"); + } + } else if (second) { + cache.invalidate(key); + } else { + cache.get(key); + } + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); } // test that the cache is not corrupted under lots of concurrent modifications, even hitting the same key @@ -683,24 +708,33 @@ public class CacheTests extends ESTestCase { .weigher((k, v) -> 2) .build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); - List threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread 
thread = new Thread(() -> { - Random random = new Random(random().nextLong()); - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { - Integer key = random.nextInt(numberOfEntries); - cache.put(key, Integer.toString(j)); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; + } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Integer key = random.nextInt(numberOfEntries); + cache.put(key, Integer.toString(j)); + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); + cache.refresh(); assertEquals(500, cache.count()); } From 84c4ab6c1864c849848c395e50f2bc183d3a304c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 28 Dec 2015 08:12:31 -0500 Subject: [PATCH 250/322] Correctly release threads from starting gate in o.e.c.ClusterServiceIT --- .../cluster/ClusterServiceIT.java | 40 +++++++++++-------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 9d453ead4de..d9096f5fd0d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -831,30 +831,38 @@ public class ClusterServiceIT extends ESIntegTestCase { counts.merge(executor, 1, (previous, one) -> previous + one); } - CountDownLatch startingGun = new CountDownLatch(1 + numberOfThreads); - List threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { final int index = i; Thread thread = new Thread(() -> { - startingGun.countDown(); - for (int j = 0; j < tasksSubmittedPerThread; j++) { - ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); - clusterService.submitStateUpdateTask( - Thread.currentThread().getName(), - new Task(), - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor, - listener); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; + } + for (int j = 0; j < tasksSubmittedPerThread; j++) { + ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); + clusterService.submitStateUpdateTask( + Thread.currentThread().getName(), + new Task(), + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor, + listener); + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - startingGun.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); // wait until all the cluster state updates have been processed updateLatch.await(); From 5eb7555ffb3ce8f4575839634d78d3d4feaaba80 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 28 Dec 2015 15:26:49 +0100 Subject: [PATCH 251/322] Make text parsing less lenient. It now requires that the parser is on a value. 
--- .../elasticsearch/common/geo/GeoUtils.java | 2 +- .../common/xcontent/XContentLocation.java | 5 +++ .../xcontent/json/JsonXContentParser.java | 5 ++- .../support/AbstractXContentParser.java | 2 +- .../index/query/GeohashCellQuery.java | 2 +- .../mapper/multifield/MultiFieldTests.java | 37 ------------------- .../test/lang_python/30_update.yaml | 10 ++--- 7 files changed, 16 insertions(+), 47 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index 4ed4a2860a3..b4aaf5830ca 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -384,7 +384,7 @@ public class GeoUtils { if(parser.currentToken() == Token.START_OBJECT) { while(parser.nextToken() != Token.END_OBJECT) { if(parser.currentToken() == Token.FIELD_NAME) { - String field = parser.text(); + String field = parser.currentName(); if(LATITUDE.equals(field)) { parser.nextToken(); switch (parser.currentToken()) { diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentLocation.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentLocation.java index ade2a457797..43ab7503cd1 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentLocation.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentLocation.java @@ -34,4 +34,9 @@ public class XContentLocation { this.lineNumber = lineNumber; this.columnNumber = columnNumber; } + + @Override + public String toString() { + return lineNumber + ":" + columnNumber; + } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java index 2e759367a77..fbdf66e73ee 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java @@ -79,7 +79,10 @@ public class JsonXContentParser extends AbstractXContentParser { @Override public String text() throws IOException { - return parser.getText(); + if (currentToken().isValue()) { + return parser.getText(); + } + throw new IllegalStateException("Can't get text on a " + currentToken() + " at " + getTokenLocation()); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index 88dffcf3395..d8216be4ad7 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -194,7 +194,7 @@ public abstract class AbstractXContentParser implements XContentParser { protected abstract double doDoubleValue() throws IOException; @Override - public String textOrNull() throws IOException { + public final String textOrNull() throws IOException { if (currentToken() == Token.VALUE_NULL) { return null; } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 1649d12f186..07e92a6dc16 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -297,7 +297,7 @@ public class GeohashCellQuery { while ((token = 
parser.nextToken()) != Token.END_OBJECT) { if (token == Token.FIELD_NAME) { - String field = parser.text(); + String field = parser.currentName(); if (parseContext.isDeprecatedSetting(field)) { // skip diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 3c301e93fa5..016c3b58144 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -335,43 +335,6 @@ public class MultiFieldTests extends ESSingleNodeTestCase { assertThat(f.stringValue(), equalTo("-1,-1")); assertThat(f.fieldType().stored(), equalTo(false)); assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - json = jsonBuilder().startObject() - .startArray("b").startArray().value(-1).value(-1).endArray().startArray().value(-2).value(-2).endArray().endArray() - .endObject().bytes(); - doc = docMapper.parse("test", "type", "1", json).rootDoc(); - - f = doc.getFields("b")[0]; - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b")); - if (indexCreatedBefore22 == true) { - assertThat(f.stringValue(), equalTo("-1.0,-1.0")); - } else { - assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0))); - } - assertThat(f.fieldType().stored(), equalTo(stored)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getFields("b")[1]; - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b")); - if (indexCreatedBefore22 == true) { - assertThat(f.stringValue(), equalTo("-2.0,-2.0")); - } else { - assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-2.0, -2.0))); - } - assertThat(f.fieldType().stored(), equalTo(stored)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("b.a"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b.a")); - // NOTE: "]" B/c the lat,long aren't specified as a string, we miss the actual values when parsing the multi - // fields. We already skipped over the coordinates values and can't get to the coordinates. - // This happens if coordinates are specified as array and object. 
- assertThat(f.stringValue(), equalTo("]")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); } public void testConvertMultiFieldCompletion() throws Exception { diff --git a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml index 4f8926e0db6..6e6266ee9c9 100644 --- a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml +++ b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml @@ -18,9 +18,8 @@ id: 1 body: script: - script: - inline: "ctx[\"_source\"][\"myfield\"]=\"bar\"" - lang: python + inline: "ctx[\"_source\"][\"myfield\"]=\"bar\"" + lang: python - do: get: index: test @@ -48,9 +47,8 @@ id: 1 body: script: - script: - inline: "a=42; ctx[\"_source\"][\"myfield\"]=\"bar\"" - lang: python + inline: "a=42; ctx[\"_source\"][\"myfield\"]=\"bar\"" + lang: python - do: get: index: test From 507bb11345f11fadd3b84f9e46bcc641d5c47abe Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:02:52 +0100 Subject: [PATCH 252/322] Split two-element array into proper variables --- .../allocation/allocator/BalancedShardsAllocator.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 17e9de19601..843d664d745 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -173,7 +173,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final float indexBalance; private final float shardBalance; - private final float[] theta; + private final float theta0; + private final float theta1; public WeightFunction(float indexBalance, float shardBalance) { @@ -181,7 +182,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (sum <= 0.0f) { throw new IllegalArgumentException("Balance factors must sum to a value > 0 but was: " + sum); } - theta = new float[]{shardBalance / sum, indexBalance / sum}; + theta0 = shardBalance / sum; + theta1 = indexBalance / sum; this.indexBalance = indexBalance; this.shardBalance = shardBalance; } @@ -189,8 +191,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards public float weight(Operation operation, Balancer balancer, ModelNode node, String index) { final float weightShard = (node.numShards() - balancer.avgShardsPerNode()); final float weightIndex = (node.numShards(index) - balancer.avgShardsPerNode(index)); - assert theta != null; - return theta[0] * weightShard + theta[1] * weightIndex; + return theta0 * weightShard + theta1 * weightIndex; } } From 0fd19008b1302ce64c6b4d96ef5f4b768dae817b Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:04:57 +0100 Subject: [PATCH 253/322] Precalculate avgShardsPerNode --- .../routing/allocation/allocator/BalancedShardsAllocator.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 843d664d745..5f0fed1f25a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -228,6 +228,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final float threshold; private final MetaData metaData; + private final float avgShardsPerNode; private final Predicate assignedFilter = shard -> shard.assignedToNode(); @@ -241,6 +242,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards nodes.put(node.nodeId(), new ModelNode(node.nodeId())); } metaData = routingNodes.metaData(); + avgShardsPerNode = ((float) metaData.totalNumberOfShards()) / nodes.size(); } /** @@ -261,7 +263,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * Returns the global average of shards per node */ public float avgShardsPerNode() { - return ((float) metaData.totalNumberOfShards()) / nodes.size(); + return avgShardsPerNode; } /** From 1bb7ca8a85aac48d16ac04dea89bbc0e2046b28c Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:06:00 +0100 Subject: [PATCH 254/322] Removed unused methods --- .../allocator/BalancedShardsAllocator.java | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 5f0fed1f25a..27557578e23 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -856,14 +856,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return index == null ? 
0 : index.numShards(); } - public Collection shards() { - Collection result = new ArrayList<>(); - for (ModelIndex index : indices.values()) { - result.addAll(index.getAllShards()); - } - return result; - } - public int highestPrimary(String index) { ModelIndex idx = indices.get(index); if (idx != null) { @@ -941,10 +933,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return id; } - public Decision getDecicion(ShardRouting shard) { - return shards.get(shard); - } - public int numShards() { return shards.size(); } From 67905b384f12541e1956bde05a92c90bb5a556f2 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:09:06 +0100 Subject: [PATCH 255/322] Remove unused class --- .../allocator/BalancedShardsAllocator.java | 53 ++++++------------- 1 file changed, 17 insertions(+), 36 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 27557578e23..2e0e51473f4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -188,7 +188,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards this.shardBalance = shardBalance; } - public float weight(Operation operation, Balancer balancer, ModelNode node, String index) { + public float weight(Balancer balancer, ModelNode node, String index) { final float weightShard = (node.numShards() - balancer.avgShardsPerNode()); final float weightIndex = (node.numShards(index) - balancer.avgShardsPerNode(index)); return theta0 * weightShard + theta1 * weightIndex; @@ -196,25 +196,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } - /** - * An enum that donates the actual operation the {@link WeightFunction} is - * applied to. - */ - public static enum Operation { - /** - * Provided during balance operations. - */ - BALANCE, - /** - * Provided during initial allocation operation for unassigned shards. - */ - ALLOCATE, - /** - * Provided during move operation. - */ - MOVE - } - /** * A {@link Balancer} */ @@ -277,7 +258,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards /** * Returns a new {@link NodeSorter} that sorts the nodes based on their * current weight with respect to the index passed to the sorter. The - * returned sorter is not sorted. Use {@link NodeSorter#reset(org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Operation, String)} + * returned sorter is not sorted. Use {@link NodeSorter#reset(String)} * to sort based on an index. 
*/ private NodeSorter newNodeSorter() { @@ -351,8 +332,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (onlyAssign == false && changed == false && allocation.deciders().canRebalance(allocation).type() == Type.YES) { NodeSorter sorter = newNodeSorter(); if (nodes.size() > 1) { /* skip if we only have one node */ - for (String index : buildWeightOrderedIndidces(Operation.BALANCE, sorter)) { - sorter.reset(Operation.BALANCE, index); + for (String index : buildWeightOrderedIndidces(sorter)) { + sorter.reset(index); final float[] weights = sorter.weights; final ModelNode[] modelNodes = sorter.modelNodes; int lowIdx = 0; @@ -391,14 +372,14 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } /* pass the delta to the replication function to prevent relocations that only swap the weights of the two nodes. * a relocation must bring us closer to the balance if we only achieve the same delta the relocation is useless */ - if (tryRelocateShard(Operation.BALANCE, minNode, maxNode, index, delta)) { + if (tryRelocateShard(minNode, maxNode, index, delta)) { /* * TODO we could be a bit smarter here, we don't need to fully sort necessarily * we could just find the place to insert linearly but the win might be minor * compared to the added complexity */ - weights[lowIdx] = sorter.weight(Operation.BALANCE, modelNodes[lowIdx]); - weights[highIdx] = sorter.weight(Operation.BALANCE, modelNodes[highIdx]); + weights[lowIdx] = sorter.weight(modelNodes[lowIdx]); + weights[highIdx] = sorter.weight(modelNodes[highIdx]); sorter.sort(0, weights.length); lowIdx = 0; highIdx = weights.length - 1; @@ -442,11 +423,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * average. To re-balance we need to move shards back eventually likely * to the nodes we relocated them from. */ - private String[] buildWeightOrderedIndidces(Operation operation, NodeSorter sorter) { + private String[] buildWeightOrderedIndidces(NodeSorter sorter) { final String[] indices = this.indices.toArray(new String[this.indices.size()]); final float[] deltas = new float[indices.length]; for (int i = 0; i < deltas.length; i++) { - sorter.reset(operation, indices[i]); + sorter.reset(indices[i]); deltas[i] = sorter.delta(); } new IntroSorter() { @@ -506,7 +487,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards final ModelNode sourceNode = nodes.get(node.nodeId()); assert sourceNode != null; final NodeSorter sorter = newNodeSorter(); - sorter.reset(Operation.MOVE, shard.getIndex()); + sorter.reset(shard.getIndex()); final ModelNode[] nodes = sorter.modelNodes; assert sourceNode.containsShard(shard); /* @@ -653,7 +634,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards */ if (!node.containsShard(shard)) { node.addShard(shard, Decision.ALWAYS); - float currentWeight = weight.weight(Operation.ALLOCATE, this, node, shard.index()); + float currentWeight = weight.weight(this, node, shard.index()); /* * Remove the shard from the node again this is only a * simulation @@ -751,7 +732,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * balance model. Iff this method returns a true the relocation has already been executed on the * simulation model as well as on the cluster. 
*/ - private boolean tryRelocateShard(Operation operation, ModelNode minNode, ModelNode maxNode, String idx, float minCost) { + private boolean tryRelocateShard(ModelNode minNode, ModelNode maxNode, String idx, float minCost) { final ModelIndex index = maxNode.getIndex(idx); Decision decision = null; if (index != null) { @@ -774,7 +755,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards Decision srcDecision; if ((srcDecision = maxNode.removeShard(shard)) != null) { minNode.addShard(shard, srcDecision); - final float delta = weight.weight(operation, this, minNode, idx) - weight.weight(operation, this, maxNode, idx); + final float delta = weight.weight(this, minNode, idx) - weight.weight(this, maxNode, idx); if (delta < minCost || (candidate != null && delta == minCost && candidate.id() > shard.id())) { /* this last line is a tie-breaker to make the shard allocation alg deterministic @@ -992,16 +973,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * Resets the sorter, recalculates the weights per node and sorts the * nodes by weight, with minimal weight first. */ - public void reset(Operation operation, String index) { + public void reset(String index) { this.index = index; for (int i = 0; i < weights.length; i++) { - weights[i] = weight(operation, modelNodes[i]); + weights[i] = weight(modelNodes[i]); } sort(0, modelNodes.length); } - public float weight(Operation operation, ModelNode node) { - return function.weight(operation, balancer, node, index); + public float weight(ModelNode node) { + return function.weight(balancer, node, index); } @Override From 5bd31a6cca0d7ea9abbd01be19b0a8a66d6ab4a7 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:10:21 +0100 Subject: [PATCH 256/322] Fix typo in method name --- .../routing/allocation/allocator/BalancedShardsAllocator.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 2e0e51473f4..4c7b691a5c1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -332,7 +332,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (onlyAssign == false && changed == false && allocation.deciders().canRebalance(allocation).type() == Type.YES) { NodeSorter sorter = newNodeSorter(); if (nodes.size() > 1) { /* skip if we only have one node */ - for (String index : buildWeightOrderedIndidces(sorter)) { + for (String index : buildWeightOrderedIndices(sorter)) { sorter.reset(index); final float[] weights = sorter.weights; final ModelNode[] modelNodes = sorter.modelNodes; @@ -423,7 +423,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * average. To re-balance we need to move shards back eventually likely * to the nodes we relocated them from. 
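 * <p>
 * A sketch of the per-index ordering key, assuming {@code NodeSorter#delta()}
 * is the spread of the sorted weight array (heaviest node minus lightest
 * node for the index the sorter was last reset to):
 * <pre>{@code
 * sorter.reset(index);
 * float delta = sorter.delta(); // a large delta means a badly balanced index
 * }</pre>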
*/ - private String[] buildWeightOrderedIndidces(NodeSorter sorter) { + private String[] buildWeightOrderedIndices(NodeSorter sorter) { final String[] indices = this.indices.toArray(new String[this.indices.size()]); final float[] deltas = new float[indices.length]; for (int i = 0; i < deltas.length; i++) { From fc0a33be052855ae7b5e1b293bf111c750d75aab Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:19:26 +0100 Subject: [PATCH 257/322] Eliminate adding/removing shard to simulate weight of added shard / removed shard Removal of the pattern node.addShard() -> calculate weight -> node.removeShard() which is expensive as, beside map lookups, it invalidates caching of precomputed values in ModelNode and ModelIndex. Replaced by adding an additional parameter to the weight function which accounts for the added / removed shard. --- .../allocator/BalancedShardsAllocator.java | 44 +++++++++---------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 4c7b691a5c1..029207261ce 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -189,8 +189,20 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public float weight(Balancer balancer, ModelNode node, String index) { - final float weightShard = (node.numShards() - balancer.avgShardsPerNode()); - final float weightIndex = (node.numShards(index) - balancer.avgShardsPerNode(index)); + return weight(balancer, node, index, 0); + } + + public float weightShardAdded(Balancer balancer, ModelNode node, String index) { + return weight(balancer, node, index, 1); + } + + public float weightShardRemoved(Balancer balancer, ModelNode node, String index) { + return weight(balancer, node, index, -1); + } + + private float weight(Balancer balancer, ModelNode node, String index, int numAdditionalShards) { + final float weightShard = (node.numShards() + numAdditionalShards - balancer.avgShardsPerNode()); + final float weightIndex = (node.numShards(index) + numAdditionalShards - balancer.avgShardsPerNode(index)); return theta0 * weightShard + theta1 * weightIndex; } @@ -627,20 +639,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (throttledNodes.contains(node)) { continue; } - /* - * The shard we add is removed below to simulate the - * addition for weight calculation we use Decision.ALWAYS to - * not violate the not null condition. 
- */ if (!node.containsShard(shard)) { - node.addShard(shard, Decision.ALWAYS); - float currentWeight = weight.weight(this, node, shard.index()); - /* - * Remove the shard from the node again this is only a - * simulation - */ - Decision removed = node.removeShard(shard); - assert removed != null; + // simulate weight if we would add shard to node + float currentWeight = weight.weightShardAdded(this, node, shard.index()); /* * Unless the operation is not providing any gains we * don't check deciders @@ -743,19 +744,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards final RoutingNode node = routingNodes.node(minNode.getNodeId()); ShardRouting candidate = null; final AllocationDeciders deciders = allocation.deciders(); - /* make a copy since we modify this list in the loop */ - final ArrayList shards = new ArrayList<>(index.getAllShards()); - for (ShardRouting shard : shards) { + for (ShardRouting shard : index.getAllShards()) { if (shard.started()) { // skip initializing, unassigned and relocating shards we can't relocate them anyway Decision allocationDecision = deciders.canAllocate(shard, node, allocation); Decision rebalanceDecision = deciders.canRebalance(shard, allocation); if (((allocationDecision.type() == Type.YES) || (allocationDecision.type() == Type.THROTTLE)) && ((rebalanceDecision.type() == Type.YES) || (rebalanceDecision.type() == Type.THROTTLE))) { - Decision srcDecision; - if ((srcDecision = maxNode.removeShard(shard)) != null) { - minNode.addShard(shard, srcDecision); - final float delta = weight.weight(this, minNode, idx) - weight.weight(this, maxNode, idx); + if (maxNode.containsShard(shard)) { + // simulate moving shard from maxNode to minNode + final float delta = weight.weightShardAdded(this, minNode, idx) - weight.weightShardRemoved(this, maxNode, idx); if (delta < minCost || (candidate != null && delta == minCost && candidate.id() > shard.id())) { /* this last line is a tie-breaker to make the shard allocation alg deterministic @@ -764,8 +762,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards candidate = shard; decision = new Decision.Multi().add(allocationDecision).add(rebalanceDecision); } - minNode.removeShard(shard); - maxNode.addShard(shard, srcDecision); } } } From 207dfc457dc310eb8b9e098f938c92c5dea79177 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 10:20:55 +0100 Subject: [PATCH 258/322] Remove superfluous method --- .../allocation/allocator/BalancedShardsAllocator.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 029207261ce..8cdc9b44b3f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -259,14 +259,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return avgShardsPerNode; } - /** - * Returns the global average of primaries per node - */ - public float avgPrimariesPerNode() { - return ((float) metaData.numberOfShards()) / nodes.size(); - } - - /** * Returns a new {@link NodeSorter} that sorts the nodes based on their * current weight with respect to the index passed to the sorter. 
The From 1536d7fe3722cfeed0c47a68d8cf054f925d9eed Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 11:07:41 +0100 Subject: [PATCH 259/322] For rebalancing an index, only consider nodes that currently have a shard of that index or where the index can be allocated This allows to prune a large number of nodes in case of hot/warm setup --- .../allocator/BalancedShardsAllocator.java | 44 +++++++++++--- .../allocation/decider/AllocationDecider.java | 9 +++ .../decider/AllocationDeciders.java | 20 +++++++ .../decider/FilterAllocationDecider.java | 60 +++++++++++++------ 4 files changed, 107 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 8cdc9b44b3f..9f34bd139d5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.IntroSorter; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -336,12 +337,33 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (onlyAssign == false && changed == false && allocation.deciders().canRebalance(allocation).type() == Type.YES) { NodeSorter sorter = newNodeSorter(); if (nodes.size() > 1) { /* skip if we only have one node */ + AllocationDeciders deciders = allocation.deciders(); + final ModelNode[] modelNodes = sorter.modelNodes; + final float[] weights = sorter.weights; for (String index : buildWeightOrderedIndices(sorter)) { - sorter.reset(index); - final float[] weights = sorter.weights; - final ModelNode[] modelNodes = sorter.modelNodes; + IndexMetaData indexMetaData = metaData.index(index); + + // find nodes that have a shard of this index or where shards of this index are allowed to stay + // move these nodes to the front of modelNodes so that we can only balance based on these nodes + int relevantNodes = 0; + for (int i = 0; i < modelNodes.length; i++) { + ModelNode modelNode = modelNodes[i]; + if (modelNode.getIndex(index) != null + || deciders.canAllocate(indexMetaData, routingNodes.node(modelNode.getNodeId()), allocation).type() != Type.NO) { + // swap nodes at position i and relevantNodes + modelNodes[i] = modelNodes[relevantNodes]; + modelNodes[relevantNodes] = modelNode; + relevantNodes++; + } + } + + if (relevantNodes < 2) { + continue; + } + + sorter.reset(index, 0, relevantNodes); int lowIdx = 0; - int highIdx = weights.length - 1; + int highIdx = relevantNodes - 1; while (true) { final ModelNode minNode = modelNodes[lowIdx]; final ModelNode maxNode = modelNodes[highIdx]; @@ -384,9 +406,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards */ weights[lowIdx] = sorter.weight(modelNodes[lowIdx]); weights[highIdx] = sorter.weight(modelNodes[highIdx]); - sorter.sort(0, weights.length); + sorter.sort(0, relevantNodes); lowIdx = 0; - highIdx = weights.length - 1; + highIdx = relevantNodes - 1; changed = true; continue; } @@ -961,12 
+983,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * Resets the sorter, recalculates the weights per node and sorts the * nodes by weight, with minimal weight first. */ - public void reset(String index) { + public void reset(String index, int from, int to) { this.index = index; - for (int i = 0; i < weights.length; i++) { + for (int i = from; i < to; i++) { weights[i] = weight(modelNodes[i]); } - sort(0, modelNodes.length); + sort(from, to); + } + + public void reset(String index) { + reset(index, 0, modelNodes.length); } public float weight(ModelNode node) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java index a6204485d7d..3bd4069ac73 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -73,6 +74,14 @@ public abstract class AllocationDecider extends AbstractComponent { return Decision.ALWAYS; } + /** + * Returns a {@link Decision} whether the given shard routing can be allocated at all at this state of the + * {@link RoutingAllocation}. The default is {@link Decision#ALWAYS}. + */ + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + return Decision.ALWAYS; + } + /** * Returns a {@link Decision} whether the given node can allow any allocation at all at this state of the * {@link RoutingAllocation}. The default is {@link Decision#ALWAYS}. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java index f57c48e8a75..059748c3f62 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -120,6 +121,25 @@ public class AllocationDeciders extends AllocationDecider { return ret; } + @Override + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + Decision.Multi ret = new Decision.Multi(); + for (AllocationDecider allocationDecider : allocations) { + Decision decision = allocationDecider.canAllocate(indexMetaData, node, allocation); + // short track if a NO is returned. 
+ if (decision == Decision.NO) { + if (!allocation.debugDecision()) { + return decision; + } else { + ret.add(decision); + } + } else if (decision != Decision.ALWAYS) { + ret.add(decision); + } + } + return ret; + } + @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { Decision.Multi ret = new Decision.Multi(); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index 4c451e7fffa..eb9fe10e965 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -88,29 +88,37 @@ public class FilterAllocationDecider extends AllocationDecider { return shouldFilter(shardRouting, node, allocation); } + @Override + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + return shouldFilter(indexMetaData, node, allocation); + } + @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return shouldFilter(shardRouting, node, allocation); } private Decision shouldFilter(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - if (clusterRequireFilters != null) { - if (!clusterRequireFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global required filters [%s]", clusterRequireFilters); - } - } - if (clusterIncludeFilters != null) { - if (!clusterIncludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global include filters [%s]", clusterIncludeFilters); - } - } - if (clusterExcludeFilters != null) { - if (clusterExcludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches global exclude filters [%s]", clusterExcludeFilters); - } - } + Decision decision = shouldClusterFilter(node, allocation); + if (decision != null) return decision; - IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); + decision = shouldIndexFilter(allocation.routingNodes().metaData().index(shardRouting.index()), node, allocation); + if (decision != null) return decision; + + return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); + } + + private Decision shouldFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { + Decision decision = shouldClusterFilter(node, allocation); + if (decision != null) return decision; + + decision = shouldIndexFilter(indexMd, node, allocation); + if (decision != null) return decision; + + return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); + } + + private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { if (indexMd.requireFilters() != null) { if (!indexMd.requireFilters().match(node.node())) { return allocation.decision(Decision.NO, NAME, "node does not match index required filters [%s]", indexMd.requireFilters()); @@ -126,8 +134,26 @@ public class FilterAllocationDecider extends AllocationDecider { return allocation.decision(Decision.NO, NAME, "node matches index exclude filters [%s]", indexMd.excludeFilters()); } } + return null; + } - return allocation.decision(Decision.YES, NAME, 
"node passes include/exclude/require filters"); + private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) { + if (clusterRequireFilters != null) { + if (!clusterRequireFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node does not match global required filters [%s]", clusterRequireFilters); + } + } + if (clusterIncludeFilters != null) { + if (!clusterIncludeFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node does not match global include filters [%s]", clusterIncludeFilters); + } + } + if (clusterExcludeFilters != null) { + if (clusterExcludeFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node matches global exclude filters [%s]", clusterExcludeFilters); + } + } + return null; } private void setClusterRequireFilters(Settings settings) { From e855282258b2d909a3ac3c7e56f260ad173ddb28 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 11:15:37 +0100 Subject: [PATCH 260/322] Simplify numShards calculation --- .../allocator/BalancedShardsAllocator.java | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 9f34bd139d5..5f76ca5d3be 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -816,8 +816,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards static class ModelNode implements Iterable { private final String id; private final Map indices = new HashMap<>(); - /* cached stats - invalidated on add/remove and lazily calculated */ - private int numShards = -1; + private int numShards = 0; public ModelNode(String id) { this.id = id; @@ -832,13 +831,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public int numShards() { - if (numShards == -1) { - int sum = 0; - for (ModelIndex index : indices.values()) { - sum += index.numShards(); - } - numShards = sum; - } return numShards; } @@ -856,17 +848,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public void addShard(ShardRouting shard, Decision decision) { - numShards = -1; ModelIndex index = indices.get(shard.index()); if (index == null) { index = new ModelIndex(shard.index()); indices.put(index.getIndexId(), index); } index.addShard(shard, decision); + numShards++; } public Decision removeShard(ShardRouting shard) { - numShards = -1; ModelIndex index = indices.get(shard.index()); Decision removed = null; if (index != null) { @@ -875,6 +866,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards indices.remove(shard.index()); } } + numShards--; return removed; } From eea791de15148b72e411ef335c685702f6d87844 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 11:20:13 +0100 Subject: [PATCH 261/322] Remove superfluous method numPrimaries --- .../allocator/BalancedShardsAllocator.java | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 5f76ca5d3be..c7632736e41 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -892,7 +892,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards static final class ModelIndex { private final String id; private final Map shards = new HashMap<>(); - private int numPrimaries = -1; private int highestPrimary = -1; public ModelIndex(String id) { @@ -924,26 +923,13 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return shards.keySet(); } - public int numPrimaries() { - if (numPrimaries == -1) { - int num = 0; - for (ShardRouting shard : shards.keySet()) { - if (shard.primary()) { - num++; - } - } - return numPrimaries = num; - } - return numPrimaries; - } - public Decision removeShard(ShardRouting shard) { - highestPrimary = numPrimaries = -1; + highestPrimary = -1; return shards.remove(shard); } public void addShard(ShardRouting shard, Decision decision) { - highestPrimary = numPrimaries = -1; + highestPrimary = -1; assert decision != null; assert !shards.containsKey(shard) : "Shard already allocated on current node: " + shards.get(shard) + " " + shard; shards.put(shard, decision); From 7e134da7d3474ffb463e585e8dcf241bb79050c1 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 11:32:11 +0100 Subject: [PATCH 262/322] Cache result of RoutingNodes.node(...) in ModelNode --- .../allocator/BalancedShardsAllocator.java | 27 ++++++++++++------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index c7632736e41..80f634e13cf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -349,7 +349,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards for (int i = 0; i < modelNodes.length; i++) { ModelNode modelNode = modelNodes[i]; if (modelNode.getIndex(index) != null - || deciders.canAllocate(indexMetaData, routingNodes.node(modelNode.getNodeId()), allocation).type() != Type.NO) { + || deciders.canAllocate(indexMetaData, modelNode.getRoutingNode(routingNodes), allocation).type() != Type.NO) { // swap nodes at position i and relevantNodes modelNodes[i] = modelNodes[relevantNodes]; modelNodes[relevantNodes] = modelNode; @@ -527,7 +527,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (currentNode.getNodeId().equals(node.nodeId())) { continue; } - RoutingNode target = routingNodes.node(currentNode.getNodeId()); + RoutingNode target = currentNode.getRoutingNode(routingNodes); Decision allocationDecision = allocation.deciders().canAllocate(shard, target, allocation); Decision rebalanceDecision = allocation.deciders().canRebalance(shard, allocation); Decision decision = new Decision.Multi().add(allocationDecision).add(rebalanceDecision); @@ -661,7 +661,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * don't check deciders */ if (currentWeight <= minWeight) 
{ - Decision currentDecision = deciders.canAllocate(shard, routingNodes.node(node.getNodeId()), allocation); + Decision currentDecision = deciders.canAllocate(shard, node.getRoutingNode(routingNodes), allocation); NOUPDATE: if (currentDecision.type() == Type.YES || currentDecision.type() == Type.THROTTLE) { if (currentWeight == minWeight) { @@ -707,11 +707,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (logger.isTraceEnabled()) { logger.trace("Assigned shard [{}] to [{}]", shard, minNode.getNodeId()); } - routingNodes.initialize(shard, routingNodes.node(minNode.getNodeId()).nodeId(), allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.initialize(shard, minNode.getNodeId(), allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); changed = true; continue; // don't add to ignoreUnassigned } else { - final RoutingNode node = routingNodes.node(minNode.getNodeId()); + final RoutingNode node = minNode.getRoutingNode(routingNodes); if (deciders.canAllocate(node, allocation).type() != Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Can not allocate on node [{}] remove from round decision [{}]", node, decision.type()); @@ -755,13 +755,12 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards logger.trace("Try relocating shard for index index [{}] from node [{}] to node [{}]", idx, maxNode.getNodeId(), minNode.getNodeId()); } - final RoutingNode node = routingNodes.node(minNode.getNodeId()); ShardRouting candidate = null; final AllocationDeciders deciders = allocation.deciders(); for (ShardRouting shard : index.getAllShards()) { if (shard.started()) { // skip initializing, unassigned and relocating shards we can't relocate them anyway - Decision allocationDecision = deciders.canAllocate(shard, node, allocation); + Decision allocationDecision = deciders.canAllocate(shard, minNode.getRoutingNode(routingNodes), allocation); Decision rebalanceDecision = deciders.canRebalance(shard, allocation); if (((allocationDecision.type() == Type.YES) || (allocationDecision.type() == Type.THROTTLE)) && ((rebalanceDecision.type() == Type.YES) || (rebalanceDecision.type() == Type.THROTTLE))) { @@ -793,11 +792,10 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } /* now allocate on the cluster - if we are started we need to relocate the shard */ if (candidate.started()) { - RoutingNode lowRoutingNode = routingNodes.node(minNode.getNodeId()); - routingNodes.relocate(candidate, lowRoutingNode.nodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.relocate(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); } else { - routingNodes.initialize(candidate, routingNodes.node(minNode.getNodeId()).nodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.initialize(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); } return true; @@ -817,6 +815,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final String id; private final Map indices = new HashMap<>(); private int numShards = 0; + // lazily calculated + private RoutingNode routingNode; public ModelNode(String id) { this.id = id; @@ -830,6 +830,13 @@ 
public class BalancedShardsAllocator extends AbstractComponent implements Shards return id; } + public RoutingNode getRoutingNode(RoutingNodes routingNodes) { + if (routingNode == null) { + routingNode = routingNodes.node(id); + } + return routingNode; + } + public int numShards() { return numShards; } From bffbad3c1996d8ed00f812f0b606fc82dbdf2f46 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 28 Dec 2015 18:38:50 +0100 Subject: [PATCH 263/322] Remove object notation for core types. When specifying a string field, you can either do: ``` { "foo": "bar" } ``` or ``` { "foo": { "value": "bar", "boost": 42 } } ``` The latter option is now removed. Closes #15388 --- .../index/mapper/core/ByteFieldMapper.java | 4 +++- .../index/mapper/core/DateFieldMapper.java | 3 ++- .../index/mapper/core/DoubleFieldMapper.java | 4 +++- .../index/mapper/core/FloatFieldMapper.java | 4 +++- .../index/mapper/core/IntegerFieldMapper.java | 4 +++- .../index/mapper/core/LongFieldMapper.java | 4 +++- .../index/mapper/core/ShortFieldMapper.java | 4 +++- .../index/mapper/core/StringFieldMapper.java | 3 ++- .../mapper/boost/CustomBoostMappingTests.java | 10 ++++++++-- .../index/mapper/boost/FieldLevelBoostTests.java | 14 ++++++++++---- .../index/mapper/compound/CompoundTypesTests.java | 10 ++++++++-- docs/reference/migration/migrate_3_0.asciidoc | 14 ++++++++++++++ 12 files changed, 62 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index fdc50c25034..b1553d455d7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; @@ -251,7 +252,8 @@ public class ByteFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Byte objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 1bd018f3f6b..ddecb558880 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -489,7 +489,8 @@ public class DateFieldMapper extends NumberFieldMapper { dateAsString = fieldType().nullValueAsString(); } else if (token == XContentParser.Token.VALUE_NUMBER) { dateAsString = parser.text(); - } else if (token == XContentParser.Token.START_OBJECT) { + } else if (token == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { String currentFieldName = null; while ((token 
= parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 18fd27883cd..0497fcd394c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; @@ -243,7 +244,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Double objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 4f8477b5f90..9aa690e0515 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; @@ -255,7 +256,8 @@ public class FloatFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Float objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 5fda6045928..343e0b8611c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; @@ -260,7 +261,8 @@ public class 
IntegerFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Integer objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index 02cfa7f81e7..70261d7dc43 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; @@ -248,7 +249,8 @@ public class LongFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Long objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index 7c232c46fb5..fdd7ab34819 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; @@ -256,7 +257,8 @@ public class ShortFieldMapper extends NumberFieldMapper { if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; Short objValue = fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 3885690c7ec..d0cb0b4dd1c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -344,7 +344,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { return new ValueAndBoost(nullValue, defaultBoost); } - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; String value = nullValue; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index 5480992acb2..b5a54ce92bd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.index.mapper.boost; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -28,7 +31,10 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; public class CustomBoostMappingTests extends ESSingleNodeTestCase { - public void testCustomBoostValues() throws Exception { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + + public void testBackCompatCustomBoostValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("s_field").field("type", "string").endObject() .startObject("l_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() @@ -40,7 +46,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() .startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java index 599084de2e6..bb5aecd9ec9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java @@ -20,8 +20,11 @@ package org.elasticsearch.index.mapper.boost; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -33,7 +36,10 @@ import static org.hamcrest.Matchers.closeTo; /** */ public class FieldLevelBoostTests extends ESSingleNodeTestCase { - public void testFieldLevelBoost() throws Exception { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + + public void testBackCompatFieldLevelBoost() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("str_field").field("type", "string").endObject() .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() @@ -45,7 +51,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() .string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); + DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = XContentFactory.jsonBuilder().startObject() .startObject("str_field").field("boost", 2.0).field("value", "some name").endObject() .startObject("int_field").field("boost", 3.0).field("value", 10).endObject() @@ -83,7 +89,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { assertThat((double) f.boost(), closeTo(9.0, 0.001)); } - public void testInvalidFieldLevelBoost() throws Exception { + public void testBackCompatInvalidFieldLevelBoost() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("str_field").field("type", "string").endObject() .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() @@ -95,7 +101,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() .string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); + DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); try { docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() .startObject("str_field").field("foo", "bar") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java index 5ecf7745e98..fa7bbf8f249 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.index.mapper.compound; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; +import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -29,14 +32,17 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; public class CompoundTypesTests extends ESSingleNodeTestCase { - public void testStringType() throws Exception { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + + public void testBackCompatStringType() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "string").endObject() .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index d53740c198b..5524e64c4e2 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -232,6 +232,20 @@ will still be accepted for indices created before the upgrade to 3.0 for backwar compatibility, but it will have no effect. Indices created on or after 3.0 will reject this option. +==== Object notation + +Core types don't support the object notation anymore, which allowed to provide +values as follows: + +[source,json] +----- +{ + "value": "field_value", + "boost": 42 +} +---- + + [[breaking_30_plugins]] === Plugin changes From 387bdbd322a11ff0cd4a1345da7a906ad65832a5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 28 Dec 2015 21:36:34 +0100 Subject: [PATCH 264/322] Don't limit recoveries in RoutingTableTests --- .../org/elasticsearch/cluster/routing/RoutingTableTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index fcdef700b98..713bf0aa311 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -50,8 +50,8 @@ public class RoutingTableTests extends ESAllocationTestCase { private int totalNumberOfShards; private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private final AllocationService ALLOCATION_SERVICE = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", Integer.MAX_VALUE) // don't limit recoveries + .put("cluster.routing.allocation.node_initial_primaries_recoveries", Integer.MAX_VALUE) .build()); private ClusterState clusterState; From c6182cbd3799c9ea25cf0716be541e01f101b025 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 28 Dec 2015 22:11:15 +0100 Subject: [PATCH 265/322] Fail replica shards before 
primary shards As failing primaries also fail associated replicas, we must fail replicas first so that their nodes are properly added to ignore list Closes #15686 --- .../cluster/routing/allocation/AllocationService.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 774bcb078f5..25937595556 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Objects; import java.util.Set; @@ -181,7 +182,10 @@ public class AllocationService extends AbstractComponent { routingNodes.unassigned().shuffle(); FailedRerouteAllocation allocation = new FailedRerouteAllocation(allocationDeciders, routingNodes, clusterState.nodes(), failedShards, clusterInfoService.getClusterInfo()); boolean changed = false; - for (FailedRerouteAllocation.FailedShard failedShard : failedShards) { + // as failing primaries also fail associated replicas, we fail replicas first here so that their nodes are added to ignore list + List orderedFailedShards = new ArrayList<>(failedShards); + orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary())); + for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) { changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, System.nanoTime(), System.currentTimeMillis())); } From 180ab2493e96223479c2d5efd9fdd0f28fd12fee Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 28 Dec 2015 22:38:55 -0500 Subject: [PATCH 266/322] Improve thirdPartyAudit check, round 3 --- .../org/elasticsearch/gradle/AntTask.groovy | 3 + .../precommit/ThirdPartyAuditTask.groovy | 229 +- .../resources/forbidden/third-party-audit.txt | 98 + core/build.gradle | 119 +- .../elasticsearch/bootstrap/untrusted.policy | 4 - modules/lang-expression/build.gradle | 6 +- .../licenses/asm-commons-LICENSE.txt | 26 - .../licenses/asm-commons-NOTICE.txt | 1 - .../licenses/asm-tree-5.0.4.jar.sha1 | 1 + modules/lang-groovy/build.gradle | 36 +- .../licenses/groovy-2.4.4-indy.jar.sha1 | 1 + ...ovy-all-LICENSE.txt => groovy-LICENSE.txt} | 0 ...roovy-all-NOTICE.txt => groovy-NOTICE.txt} | 0 .../licenses/groovy-all-2.4.4-indy.jar.sha1 | 1 - .../licenses/groovy-all-LICENSE-ANTLR.txt | 31 - .../licenses/groovy-all-LICENSE-ASM.txt | 31 - .../licenses/groovy-all-LICENSE-CLI.txt | 202 -- .../licenses/groovy-all-LICENSE-JSR223.txt | 30 - .../plugin-metadata/plugin-security.policy | 1 - .../script/groovy/GroovySecurityTests.java | 2 - plugins/discovery-azure/build.gradle | 43 +- plugins/discovery-ec2/build.gradle | 17 +- plugins/discovery-gce/build.gradle | 12 +- plugins/lang-plan-a/build.gradle | 8 +- .../licenses/asm-commons-LICENSE.txt | 26 - .../licenses/asm-commons-NOTICE.txt | 1 - .../licenses/asm-tree-5.0.4.jar.sha1 | 1 + plugins/lang-python/build.gradle | 855 ++++---- plugins/mapper-attachments/build.gradle | 1927 ++++++++++++++++- plugins/repository-hdfs/build.gradle | 265 ++- plugins/repository-s3/build.gradle | 17 +- qa/evil-tests/build.gradle | 17 
+- test/framework/build.gradle | 30 +- 33 files changed, 3140 insertions(+), 901 deletions(-) create mode 100644 buildSrc/src/main/resources/forbidden/third-party-audit.txt delete mode 100644 modules/lang-expression/licenses/asm-commons-LICENSE.txt delete mode 100644 modules/lang-expression/licenses/asm-commons-NOTICE.txt create mode 100644 modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 create mode 100644 modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 rename modules/lang-groovy/licenses/{groovy-all-LICENSE.txt => groovy-LICENSE.txt} (100%) rename modules/lang-groovy/licenses/{groovy-all-NOTICE.txt => groovy-NOTICE.txt} (100%) delete mode 100644 modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 delete mode 100644 modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt delete mode 100644 modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt delete mode 100644 modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt delete mode 100644 modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt delete mode 100644 plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt delete mode 100644 plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt create mode 100644 plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy index 5d7486371eb..0393e7632bb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -58,6 +58,9 @@ public abstract class AntTask extends DefaultTask { ant.project.removeBuildListener(listener) } + // otherwise groovy replaces System.out, and you have no chance to debug + // ant.saveStreams = false + final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) BuildLogger antLogger = makeLogger(stream, outputLevel) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 2ee4c29d614..3dda6b186ac 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -16,51 +16,39 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.gradle.precommit +package org.elasticsearch.gradle.precommit; -import org.apache.tools.ant.BuildLogger -import org.apache.tools.ant.DefaultLogger -import org.apache.tools.ant.Project -import org.elasticsearch.gradle.AntTask -import org.gradle.api.artifacts.Configuration -import org.gradle.api.file.FileCollection +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.BuildListener; +import org.apache.tools.ant.BuildLogger; +import org.apache.tools.ant.DefaultLogger; +import org.apache.tools.ant.Project; +import org.elasticsearch.gradle.AntTask; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.FileCollection; -import java.nio.file.FileVisitResult -import java.nio.file.Files -import java.nio.file.Path -import java.nio.file.SimpleFileVisitor -import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Basic static checking to keep tabs on third party JARs */ public class ThirdPartyAuditTask extends AntTask { - - // true to be lenient about MISSING CLASSES - private boolean missingClasses; // patterns for classes to exclude, because we understand their issues private String[] excludes = new String[0]; ThirdPartyAuditTask() { - dependsOn(project.configurations.testCompile) - description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'" - } - - /** - * Set to true to be lenient with missing classes. By default this check will fail if it finds - * MISSING CLASSES. This means the set of jars is incomplete. However, in some cases - * this can be due to intentional exclusions that are well-tested and understood. - */ - public void setMissingClasses(boolean value) { - missingClasses = value; - } - - /** - * Returns true if leniency about missing classes is enabled. - */ - public boolean isMissingClasses() { - return missingClasses; + // we depend on this because its the only reliable configuration + // this probably makes the build slower: gradle you suck here when it comes to configurations, you pay the price. + dependsOn(project.configurations.testCompile); + description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'"; } /** @@ -70,7 +58,7 @@ public class ThirdPartyAuditTask extends AntTask { public void setExcludes(String[] classes) { for (String s : classes) { if (s.indexOf('*') != -1) { - throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!") + throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!"); } } excludes = classes; @@ -83,29 +71,78 @@ public class ThirdPartyAuditTask extends AntTask { return excludes; } + // yes, we parse Uwe Schindler's errors to find missing classes, and to keep a continuous audit. Just don't let him know! + static final Pattern MISSING_CLASS_PATTERN = + Pattern.compile(/WARNING: The referenced class '(.*)' cannot be loaded\. 
Please fix the classpath\!/); + + static final Pattern VIOLATION_PATTERN = + Pattern.compile(/\s\sin ([a-zA-Z0-9\$\.]+) \(.*\)/); + + // we log everything and capture errors and handle them with our whitelist + // this is important, as we detect stale whitelist entries, workaround forbidden apis bugs, + // and it also allows whitelisting missing classes! + static class EvilLogger extends DefaultLogger { + final Set missingClasses = new TreeSet<>(); + final Map> violations = new TreeMap<>(); + String previousLine = null; + + @Override + public void messageLogged(BuildEvent event) { + if (event.getTask().getClass() == de.thetaphi.forbiddenapis.ant.AntTask.class) { + if (event.getPriority() == Project.MSG_WARN) { + Matcher m = MISSING_CLASS_PATTERN.matcher(event.getMessage()); + if (m.matches()) { + missingClasses.add(m.group(1).replace('.', '/') + ".class"); + } + } else if (event.getPriority() == Project.MSG_ERR) { + Matcher m = VIOLATION_PATTERN.matcher(event.getMessage()); + if (m.matches()) { + String violation = previousLine + '\n' + event.getMessage(); + String clazz = m.group(1).replace('.', '/') + ".class"; + List current = violations.get(clazz); + if (current == null) { + current = new ArrayList<>(); + violations.put(clazz, current); + } + current.add(violation); + } + previousLine = event.getMessage(); + } + } + super.messageLogged(event); + } + } + @Override protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { - return new DefaultLogger( - errorPrintStream: stream, - outputPrintStream: stream, - // ignore passed in outputLevel for now, until we are filtering warning messages - messageOutputLevel: Project.MSG_ERR) + DefaultLogger log = new EvilLogger(); + log.errorPrintStream = stream; + log.outputPrintStream = stream; + log.messageOutputLevel = outputLevel; + return log; } @Override protected void runAnt(AntBuilder ant) { - ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask) + Configuration configuration = project.configurations.findByName('runtime'); + if (configuration == null) { + // some projects apparently do not have 'runtime'? what a nice inconsistency, + // basically only serves to waste time in build logic! + configuration = project.configurations.findByName('testCompile'); + } + assert configuration != null; + ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask); // we only want third party dependencies. - FileCollection jars = project.configurations.testCompile.fileCollection({ dependency -> + FileCollection jars = configuration.fileCollection({ dependency -> dependency.group.startsWith("org.elasticsearch") == false - }) + }); // we don't want provided dependencies, which we have already scanned. e.g. don't // scan ES core's dependencies for every single plugin - Configuration provided = project.configurations.findByName('provided') + Configuration provided = project.configurations.findByName('provided'); if (provided != null) { - jars -= provided + jars -= provided; } // no dependencies matched, we are done @@ -113,72 +150,101 @@ public class ThirdPartyAuditTask extends AntTask { return; } - // print which jars we are going to scan, always // this is not the time to try to be succinct! Forbidden will print plenty on its own! - Set names = new HashSet<>() + Set names = new TreeSet<>(); for (File jar : jars) { - names.add(jar.getName()) - } - logger.error("[thirdPartyAudit] Scanning: " + names) - - // warn that classes are missing - // TODO: move these to excludes list! 
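The EvilLogger above classifies forbidden-apis output purely by the shape of each line: warnings matching the missing-class pattern are collected as missing, and error lines matching the violation pattern are paired with the preceding line and grouped per class. A standalone sketch of that scraping; the sample lines are hypothetical stand-ins for what forbidden-apis prints, and the exact message text may change between versions:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AuditLogParseDemo {
    static final Pattern MISSING_CLASS = Pattern.compile(
            "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!");
    static final Pattern VIOLATION = Pattern.compile(
            "\\s\\sin ([a-zA-Z0-9\\$\\.]+) \\(.*\\)");

    public static void main(String[] args) {
        Matcher missing = MISSING_CLASS.matcher(
                "WARNING: The referenced class 'org.foo.Bar' cannot be loaded. Please fix the classpath!");
        if (missing.matches()) {
            // binary class name -> resource path, the key format used for exclusions
            System.out.println(missing.group(1).replace('.', '/') + ".class"); // org/foo/Bar.class
        }
        Matcher violation = VIOLATION.matcher("  in org.foo.Baz (Baz.java:42)");
        if (violation.matches()) {
            System.out.println(violation.group(1).replace('.', '/') + ".class"); // org/foo/Baz.class
        }
    }
}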
- if (missingClasses) { - logger.warn("[thirdPartyAudit] WARNING: CLASSES ARE MISSING! Expect NoClassDefFoundError in bug reports from users!") + names.add(jar.getName()); } // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, // and then remove our temp dir afterwards. don't complain: try it yourself. // we don't use gradle temp dir handling, just google it, or try it yourself. - File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit') + File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit'); // clean up any previous mess (if we failed), then unzip everything to one directory - ant.delete(dir: tmpDir.getAbsolutePath()) - tmpDir.mkdirs() + ant.delete(dir: tmpDir.getAbsolutePath()); + tmpDir.mkdirs(); for (File jar : jars) { - ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()) + ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()); } // convert exclusion class names to binary file names String[] excludedFiles = new String[excludes.length]; for (int i = 0; i < excludes.length; i++) { - excludedFiles[i] = excludes[i].replace('.', '/') + ".class" - // check if the excluded file exists, if not, sure sign things are outdated - if (! new File(tmpDir, excludedFiles[i]).exists()) { - throw new IllegalStateException("bogus thirdPartyAudit exclusion: '" + excludes[i] + "', not found in any dependency") - } + excludedFiles[i] = excludes[i].replace('.', '/') + ".class"; } + Set excludedSet = new TreeSet<>(Arrays.asList(excludedFiles)); // jarHellReprise - checkSheistyClasses(tmpDir.toPath(), new HashSet<>(Arrays.asList(excludedFiles))); + Set sheistySet = getSheistyClasses(tmpDir.toPath()); - ant.thirdPartyAudit(internalRuntimeForbidden: true, + try { + ant.thirdPartyAudit(internalRuntimeForbidden: false, failOnUnsupportedJava: false, - failOnMissingClasses: !missingClasses, - classpath: project.configurations.testCompile.asPath) { - fileset(dir: tmpDir, excludes: excludedFiles.join(',')) + failOnMissingClasses: false, + signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()), + classpath: configuration.asPath) { + fileset(dir: tmpDir) + } + } catch (BuildException ignore) {} + + EvilLogger evilLogger = null; + for (BuildListener listener : ant.project.getBuildListeners()) { + if (listener instanceof EvilLogger) { + evilLogger = (EvilLogger) listener; + break; + } } + assert evilLogger != null; + + // keep our whitelist up to date + Set bogusExclusions = new TreeSet<>(excludedSet); + bogusExclusions.removeAll(sheistySet); + bogusExclusions.removeAll(evilLogger.missingClasses); + bogusExclusions.removeAll(evilLogger.violations.keySet()); + if (!bogusExclusions.isEmpty()) { + throw new IllegalStateException("Invalid exclusions, nothing is wrong with these classes: " + bogusExclusions); + } + + // don't duplicate classes with the JDK + sheistySet.removeAll(excludedSet); + if (!sheistySet.isEmpty()) { + throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet); + } + + // don't allow a broken classpath + evilLogger.missingClasses.removeAll(excludedSet); + if (!evilLogger.missingClasses.isEmpty()) { + throw new IllegalStateException("CLASSES ARE MISSING! " + evilLogger.missingClasses); + } + + // don't use internal classes + evilLogger.violations.keySet().removeAll(excludedSet); + if (!evilLogger.violations.isEmpty()) { + throw new IllegalStateException("VIOLATIONS WERE FOUND! 
" + evilLogger.violations); + } + // clean up our mess (if we succeed) - ant.delete(dir: tmpDir.getAbsolutePath()) + ant.delete(dir: tmpDir.getAbsolutePath()); } - + /** * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk! */ - private void checkSheistyClasses(Path root, Set excluded) { + private Set getSheistyClasses(Path root) { // system.parent = extensions loader. // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!). // but groovy/gradle needs to work at all first! - ClassLoader ext = ClassLoader.getSystemClassLoader().getParent() - assert ext != null + ClassLoader ext = ClassLoader.getSystemClassLoader().getParent(); + assert ext != null; Set sheistySet = new TreeSet<>(); Files.walkFileTree(root, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - String entry = root.relativize(file).toString() + String entry = root.relativize(file).toString(); if (entry.endsWith(".class")) { if (ext.getResource(entry) != null) { sheistySet.add(entry); @@ -187,19 +253,6 @@ public class ThirdPartyAuditTask extends AntTask { return FileVisitResult.CONTINUE; } }); - - // check if we are ok - if (sheistySet.isEmpty()) { - return; - } - - // leniency against exclusions list - sheistySet.removeAll(excluded); - - if (sheistySet.isEmpty()) { - logger.warn("[thirdPartyAudit] WARNING: JAR HELL WITH JDK! Expect insanely hard-to-debug problems!") - } else { - throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet); - } + return sheistySet; } } diff --git a/buildSrc/src/main/resources/forbidden/third-party-audit.txt b/buildSrc/src/main/resources/forbidden/third-party-audit.txt new file mode 100644 index 00000000000..0346d6d8a58 --- /dev/null +++ b/buildSrc/src/main/resources/forbidden/third-party-audit.txt @@ -0,0 +1,98 @@ +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on +# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +# Checks that we run against bytecode of third-party dependencies +# +# Be judicious about what is denied here: MANY classes will be subject +# to these rules, so please try to keep the false positive rate low! +# +# Each third party .class failing checks will need to be explicitly +# listed in the module's build.gradle file: +# +# thirdPartyAudit.excludes = [ +# // uses internal java api: sun.misc.Unsafe +# 'org.foo.Bar', +# // missing class! +# 'com.missing.dependency.WTF', +# // ... +# ] +# +# Wildcards are not allowed, excludes must be exact. The build also fails with +# the message "Invalid exclusions, nothing is wrong with these classes" if +# extraneous classes are in the excludes list, this ensures the list is +# up-to-date, and that each module accurately documents the evil things +# that its dependencies do. 
+# +# For more information, look at ThirdPartyAuditTask.groovy in buildSrc/ + +# +# Ruleset to fail on java internal apis, using this logic: +# http://docs.oracle.com/javase/8/docs/api/java/lang/SecurityManager.html#checkPackageAccess-java.lang.String- +# +# // The list may change at any time, regenerated with: +# for (String pkg : new TreeSet<>(Arrays.asList( +# Security.getProperty("package.access").split(",")))) { +# System.out.println(pkg + "**"); +# } +# +@defaultMessage non-public internal runtime class +com.oracle.webservices.internal.** +com.oracle.xmlns.internal.** +com.sun.activation.registries.** +com.sun.browser.** +com.sun.corba.se.** +com.sun.glass.** +com.sun.imageio.** +com.sun.istack.internal.** +com.sun.javafx.** +com.sun.jmx.** +com.sun.media.** +com.sun.media.sound.** +com.sun.naming.internal.** +com.sun.openpisces.** +com.sun.org.apache.bcel.internal.** +com.sun.org.apache.regexp.internal.** +com.sun.org.apache.xalan.internal.extensions.** +com.sun.org.apache.xalan.internal.lib.** +com.sun.org.apache.xalan.internal.res.** +com.sun.org.apache.xalan.internal.templates.** +com.sun.org.apache.xalan.internal.utils.** +com.sun.org.apache.xalan.internal.xslt.** +com.sun.org.apache.xalan.internal.xsltc.cmdline.** +com.sun.org.apache.xalan.internal.xsltc.compiler.** +com.sun.org.apache.xalan.internal.xsltc.trax.** +com.sun.org.apache.xalan.internal.xsltc.util.** +com.sun.org.apache.xerces.internal.** +com.sun.org.apache.xml.internal.res.** +com.sun.org.apache.xml.internal.security.** +com.sun.org.apache.xml.internal.serializer.utils.** +com.sun.org.apache.xml.internal.utils.** +com.sun.org.apache.xpath.internal.** +com.sun.org.glassfish.** +com.sun.pisces.** +com.sun.prism.** +com.sun.proxy.** +com.sun.scenario.** +com.sun.t2k.** +com.sun.webkit.** +com.sun.xml.internal.** +jdk.internal.** +jdk.management.resource.internal.** +jdk.nashorn.internal.** +jdk.nashorn.tools.** +oracle.jrockit.jfr.** +org.jcp.xml.dsig.internal.** +sun.** diff --git a/core/build.gradle b/core/build.gradle index 9bbf8bc1593..7b80449729e 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -111,12 +111,121 @@ forbiddenPatterns { exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt' } -// classes are missing, e.g. org.jboss.marshalling.Marshaller -thirdPartyAudit.missingClasses = true -// uses internal sun ssl classes! thirdPartyAudit.excludes = [ - // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) - 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + + // classes are missing! 
+ + // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) + 'com.fasterxml.jackson.databind.ObjectMapper', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder (netty) + 'com.google.protobuf.CodedInputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender (netty) + 'com.google.protobuf.CodedOutputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufDecoder (netty) + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLite', + 'com.google.protobuf.Parser', + + // from org.apache.log4j.receivers.net.JMSReceiver (log4j-extras) + 'javax.jms.Message', + 'javax.jms.MessageListener', + 'javax.jms.ObjectMessage', + 'javax.jms.TopicConnection', + 'javax.jms.TopicConnectionFactory', + 'javax.jms.TopicPublisher', + 'javax.jms.TopicSession', + 'javax.jms.TopicSubscriber', + + // from org.apache.log4j.net.SMTPAppender (log4j) + 'javax.mail.Authenticator', + 'javax.mail.Message$RecipientType', + 'javax.mail.Message', + 'javax.mail.Multipart', + 'javax.mail.PasswordAuthentication', + 'javax.mail.Session', + 'javax.mail.Transport', + 'javax.mail.internet.InternetAddress', + 'javax.mail.internet.InternetHeaders', + 'javax.mail.internet.MimeBodyPart', + 'javax.mail.internet.MimeMessage', + 'javax.mail.internet.MimeMultipart', + 'javax.mail.internet.MimeUtility', + + // from org.jboss.netty.channel.socket.http.HttpTunnelingServlet (netty) + 'javax.servlet.ServletConfig', + 'javax.servlet.ServletException', + 'javax.servlet.ServletOutputStream', + 'javax.servlet.http.HttpServlet', + 'javax.servlet.http.HttpServletRequest', + 'javax.servlet.http.HttpServletResponse', + + // from org.jboss.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from org.apache.lucene.sandbox.queries.regex.JakartaRegexpCapabilities$JakartaRegexMatcher (lucene-sandbox) + 'org.apache.regexp.CharacterIterator', + 'org.apache.regexp.RE', + 'org.apache.regexp.REProgram', + + // from org.jboss.netty.handler.ssl.OpenSslEngine (netty) + 'org.apache.tomcat.jni.Buffer', + 'org.apache.tomcat.jni.Library', + 'org.apache.tomcat.jni.Pool', + 'org.apache.tomcat.jni.SSL', + 'org.apache.tomcat.jni.SSLContext', + + // from org.jboss.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.asn1.x500.X500Name', + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', + 'org.bouncycastle.jce.provider.BouncyCastleProvider', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + + // from org.jboss.netty.handler.ssl.JettyNpnSslEngine (netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from org.jboss.netty.logging.JBossLoggerFactory (netty) + 'org.jboss.logging.Logger', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from org.jboss.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from org.jboss.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 
'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + + // from com.spatial4j.core.io.GeoJSONReader (spatial4j) + 'org.noggit.JSONParser', + + // from org.jboss.netty.container.osgi.NettyBundleActivator (netty) + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + + // from org.jboss.netty.logging.OsgiLoggerFactory$1 (netty) + 'org.osgi.framework.ServiceReference', + 'org.osgi.service.log.LogService', + 'org.osgi.util.tracker.ServiceTracker', + 'org.osgi.util.tracker.ServiceTrackerCustomizer', + + 'org.slf4j.impl.StaticMDCBinder', + 'org.slf4j.impl.StaticMarkerBinder', ] // dependency license are currently checked in distribution diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy index 8e7ca8d8b6e..8078516c7d5 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy @@ -26,10 +26,6 @@ grant { // groovy IndyInterface bootstrap requires this property for indy logging permission java.util.PropertyPermission "groovy.indy.logging", "read"; - // groovy JsonOutput, just allow it to read these props so it works (unsafe is not allowed) - permission java.util.PropertyPermission "groovy.json.faststringutils.disable", "read"; - permission java.util.PropertyPermission "groovy.json.faststringutils.write.to.final.fields", "read"; - // needed by Rhino engine exception handling permission java.util.PropertyPermission "rhino.stack.style", "read"; diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 5563fdafe36..9e3943a32b2 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -27,16 +27,14 @@ dependencies { compile 'org.antlr:antlr4-runtime:4.5.1-1' compile 'org.ow2.asm:asm:5.0.4' compile 'org.ow2.asm:asm-commons:5.0.4' + compile 'org.ow2.asm:asm-tree:5.0.4' } dependencyLicenses { mapping from: /lucene-.*/, to: 'lucene' + mapping from: /asm-.*/, to: 'asm' } -// do we or do we not depend on asm-tree, that is the question -// classes are missing, e.g. org.objectweb.asm.tree.LabelNode -thirdPartyAudit.missingClasses = true - compileJava.options.compilerArgs << '-Xlint:-rawtypes' compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' diff --git a/modules/lang-expression/licenses/asm-commons-LICENSE.txt b/modules/lang-expression/licenses/asm-commons-LICENSE.txt deleted file mode 100644 index afb064f2f26..00000000000 --- a/modules/lang-expression/licenses/asm-commons-LICENSE.txt +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2012 France Télécom -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -3. Neither the name of the copyright holders nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. diff --git a/modules/lang-expression/licenses/asm-commons-NOTICE.txt b/modules/lang-expression/licenses/asm-commons-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/modules/lang-expression/licenses/asm-commons-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 new file mode 100644 index 00000000000..5822a485a61 --- /dev/null +++ b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 @@ -0,0 +1 @@ +396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 7ffb5626d4a..7db4eab4403 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -23,7 +23,7 @@ esplugin { } dependencies { - compile 'org.codehaus.groovy:groovy-all:2.4.4:indy' + compile 'org.codehaus.groovy:groovy:2.4.4:indy' } compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast,-deprecation' @@ -36,11 +36,33 @@ integTest { } } -// classes are missing, e.g. 
jline.console.completer.Completer -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // uses internal java api: sun.misc.Unsafe - 'groovy.json.internal.FastStringUtils', - 'groovy.json.internal.FastStringUtils$StringImplementation$1', - 'groovy.json.internal.FastStringUtils$StringImplementation$2', + // classes are missing, we bring in a minimal groovy dist + // for example we do not need ivy, scripts arent allowed to download code + 'com.thoughtworks.xstream.XStream', + 'groovyjarjarasm.asm.util.Textifiable', + 'org.apache.ivy.Ivy', + 'org.apache.ivy.core.event.IvyListener', + 'org.apache.ivy.core.event.download.PrepareDownloadEvent', + 'org.apache.ivy.core.event.resolve.StartResolveEvent', + 'org.apache.ivy.core.module.descriptor.Configuration', + 'org.apache.ivy.core.module.descriptor.DefaultDependencyArtifactDescriptor', + 'org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor', + 'org.apache.ivy.core.module.descriptor.DefaultExcludeRule', + 'org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor', + 'org.apache.ivy.core.module.id.ArtifactId', + 'org.apache.ivy.core.module.id.ModuleId', + 'org.apache.ivy.core.module.id.ModuleRevisionId', + 'org.apache.ivy.core.report.ResolveReport', + 'org.apache.ivy.core.resolve.ResolveOptions', + 'org.apache.ivy.core.settings.IvySettings', + 'org.apache.ivy.plugins.matcher.ExactPatternMatcher', + 'org.apache.ivy.plugins.matcher.PatternMatcher', + 'org.apache.ivy.plugins.resolver.IBiblioResolver', + 'org.apache.ivy.util.DefaultMessageLogger', + 'org.apache.ivy.util.Message', + 'org.fusesource.jansi.Ansi$Attribute', + 'org.fusesource.jansi.Ansi$Color', + 'org.fusesource.jansi.Ansi', + 'org.fusesource.jansi.AnsiRenderWriter', ] diff --git a/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 b/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 new file mode 100644 index 00000000000..30b996327b4 --- /dev/null +++ b/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 @@ -0,0 +1 @@ +139af316ac35534120c53f05393ce46d60d6da48 \ No newline at end of file diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE.txt b/modules/lang-groovy/licenses/groovy-LICENSE.txt similarity index 100% rename from modules/lang-groovy/licenses/groovy-all-LICENSE.txt rename to modules/lang-groovy/licenses/groovy-LICENSE.txt diff --git a/modules/lang-groovy/licenses/groovy-all-NOTICE.txt b/modules/lang-groovy/licenses/groovy-NOTICE.txt similarity index 100% rename from modules/lang-groovy/licenses/groovy-all-NOTICE.txt rename to modules/lang-groovy/licenses/groovy-NOTICE.txt diff --git a/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 b/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 deleted file mode 100644 index 458716cefdf..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -574a15e35eba5f986a0564ae197c78e843ece954 diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt deleted file mode 100644 index d62cc1ab2d7..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt +++ /dev/null @@ -1,31 +0,0 @@ - -SOFTWARE RIGHTS - -ANTLR 1989-2006 Developed by Terence Parr -Partially supported by University of San Francisco & jGuru.com - -We reserve no legal rights to the ANTLR--it is fully in the -public domain. 
An individual or company may do whatever -they wish with source code distributed with ANTLR or the -code generated by ANTLR, including the incorporation of -ANTLR, or its output, into commerical software. - -We encourage users to develop software with ANTLR. However, -we do ask that credit is given to us for developing -ANTLR. By "credit", we mean that if you use ANTLR or -incorporate any source code into one of your programs -(commercial product, research project, or otherwise) that -you acknowledge this fact somewhere in the documentation, -research report, etc... If you like ANTLR and have -developed a nice tool with the output, please mention that -you developed it using ANTLR. In addition, we ask that the -headers remain intact in our source code. As long as these -guidelines are kept, we expect to continue enhancing this -system and expect to make other tools available as they are -completed. - -The primary ANTLR guy: - -Terence Parr -parrt@cs.usfca.edu -parrt@antlr.org diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt deleted file mode 100644 index ae898f75545..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt +++ /dev/null @@ -1,31 +0,0 @@ -/*** - * http://asm.objectweb.org/ - * - * ASM: a very small and fast Java bytecode manipulation framework - * Copyright (c) 2000-2005 INRIA, France Telecom - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holders nor the names of its - * contributors may be used to endorse or promote products derived from - * this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF - * THE POSSIBILITY OF SUCH DAMAGE. - */ diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt deleted file mode 100644 index 57bc88a15a0..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt +++ /dev/null @@ -1,202 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt deleted file mode 100644 index b750c0f5f7b..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt +++ /dev/null @@ -1,30 +0,0 @@ -The following notice applies to the files: - -src/main/org/codehaus/groovy/jsr223/GroovyCompiledScript.java -src/main/org/codehaus/groovy/jsr223/GroovyScriptEngineFactory.java -src/main/org/codehaus/groovy/jsr223/GroovyScriptEngineImpl.java - - -/* - * Copyright 2006 Sun Microsystems, Inc. All rights reserved. - * Use is subject to license terms. - * - * Redistribution and use in source and binary forms, with or without modification, are - * permitted provided that the following conditions are met: Redistributions of source code - * must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, this list of - * conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. Neither the name of the Sun Microsystems nor the names of - * is contributors may be used to endorse or promote products derived from this software - * without specific prior written permission. - - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS - * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY - * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER - * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON - * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE - * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ diff --git a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy index e1fd920d119..4ada1ad5f38 100644 --- a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy @@ -34,7 +34,6 @@ grant { permission org.elasticsearch.script.ClassPermission "<>"; // groovy runtime (TODO: clean these up if possible) permission org.elasticsearch.script.ClassPermission "groovy.grape.GrabAnnotationTransformation"; - permission org.elasticsearch.script.ClassPermission "groovy.json.JsonOutput"; permission org.elasticsearch.script.ClassPermission "groovy.lang.Binding"; permission org.elasticsearch.script.ClassPermission "groovy.lang.GroovyObject"; permission org.elasticsearch.script.ClassPermission "groovy.lang.GString"; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 446a4dfd562..f5c44c6eea1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -83,8 +83,6 @@ public class GroovySecurityTests extends ESTestCase { assertSuccess("def range = 1..doc['foo'].value; def v = range.get(0)"); // Maps assertSuccess("def v = doc['foo'].value; def m = [:]; m.put(\"value\", v)"); - // serialization to json (this is best effort considering the unsafe etc at play) - assertSuccess("def x = 5; groovy.json.JsonOutput.toJson(x)"); // Times assertSuccess("def t = Instant.now().getMillis()"); // GroovyCollections diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index ce80a441760..d85d08794ea 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -62,15 +62,38 @@ compileJava.options.compilerArgs << '-Xlint:-deprecation' // TODO: and why does this static not show up in maven... compileTestJava.options.compilerArgs << '-Xlint:-static' -// classes are missing, e.g. org.osgi.framework.BundleActivator -thirdPartyAudit.missingClasses = true -// TODO: figure out what is happening and fix this!!!!!!!!!!! -// there might be still some undetected jar hell! 
-// we need to fix https://github.com/policeman-tools/forbidden-apis/issues/91 first thirdPartyAudit.excludes = [ - // uses internal java api: com.sun.xml.fastinfoset.stax.StAXDocumentParser - 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector', - 'com.sun.xml.bind.v2.runtime.unmarshaller.FastInfosetConnector$CharSequenceImpl', - // uses internal java api: com.sun.xml.fastinfoset.stax.StAXDocumentSerializer - 'com.sun.xml.bind.v2.runtime.output.FastInfosetStreamWriterOutput', + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.eclipse.persistence.descriptors.ClassDescriptor', + 'org.eclipse.persistence.internal.oxm.MappingNodeValue', + 'org.eclipse.persistence.internal.oxm.TreeObjectBuilder', + 'org.eclipse.persistence.internal.oxm.XPathFragment', + 'org.eclipse.persistence.internal.oxm.XPathNode', + 'org.eclipse.persistence.internal.queries.ContainerPolicy', + 'org.eclipse.persistence.jaxb.JAXBContext', + 'org.eclipse.persistence.jaxb.JAXBHelper', + 'org.eclipse.persistence.mappings.DatabaseMapping', + 'org.eclipse.persistence.mappings.converters.TypeConversionConverter', + 'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping', + 'org.eclipse.persistence.oxm.XMLContext', + 'org.eclipse.persistence.oxm.XMLDescriptor', + 'org.eclipse.persistence.oxm.XMLField', + 'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping', + 'org.eclipse.persistence.sessions.DatabaseSession', + 'org.jvnet.fastinfoset.VocabularyApplicationData', + 'org.jvnet.staxex.Base64Data', + 'org.jvnet.staxex.XMLStreamReaderEx', + 'org.jvnet.staxex.XMLStreamWriterEx', + 'org.osgi.framework.Bundle', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.BundleEvent', + 'org.osgi.framework.SynchronousBundleListener', + 'com.sun.xml.fastinfoset.stax.StAXDocumentParser', + 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer', ] diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 355dbc55164..403b2638257 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -49,11 +49,16 @@ test { systemProperty 'tests.artifact', project.name } -// classes are missing, e.g. 
org.apache.avalon.framework.logger.Logger -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext - 'com.amazonaws.util.XpathUtils', + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', + + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', ] diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index b054e0f37b8..6f4459ef753 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -32,5 +32,13 @@ test { systemProperty 'tests.artifact', project.name } -// classes are missing, e.g. org.apache.log.Logger -thirdPartyAudit.missingClasses = true +thirdPartyAudit.excludes = [ + // classes are missing + 'com.google.common.base.Splitter', + 'com.google.common.collect.Lists', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle index 5f0ddafcc97..dc0cfca2fa7 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-plan-a/build.gradle @@ -28,14 +28,16 @@ dependencies { compile 'org.antlr:antlr4-runtime:4.5.1-1' compile 'org.ow2.asm:asm:5.0.4' compile 'org.ow2.asm:asm-commons:5.0.4' + compile 'org.ow2.asm:asm-tree:5.0.4' +} + +dependencyLicenses { + mapping from: /asm-.*/, to: 'asm' } compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes' compileTestJava.options.compilerArgs << '-Xlint:-unchecked' -// classes are missing, e.g. org.objectweb.asm.tree.LabelNode -thirdPartyAudit.missingClasses = true - // regeneration logic, comes in via ant right now // don't port it to gradle, it works fine. diff --git a/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt b/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt deleted file mode 100644 index afb064f2f26..00000000000 --- a/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2012 France Télécom -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -3. Neither the name of the copyright holders nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt b/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 b/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 new file mode 100644 index 00000000000..5822a485a61 --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 @@ -0,0 +1 @@ +396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index 1c33ad2d5ee..103a15784ea 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -36,380 +36,493 @@ integTest { } } -// classes are missing, e.g. org.tukaani.xz.FilterOptions -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) - 'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', - // uses internal java api: sun.misc.Cleaner - 'org.python.netty.util.internal.Cleaner0', + // uses internal java api: sun.misc.Cleaner + 'org.python.netty.util.internal.Cleaner0', - // uses internal java api: sun.misc.Signal - 'jnr.posix.JavaPOSIX', - 'jnr.posix.JavaPOSIX$SunMiscSignalHandler', + // uses internal java api: sun.misc.Signal + 'jnr.posix.JavaPOSIX', + 'jnr.posix.JavaPOSIX$SunMiscSignalHandler', - // uses internal java api: sun.misc.Unsafe - 'com.kenai.jffi.MemoryIO$UnsafeImpl', - 'com.kenai.jffi.MemoryIO$UnsafeImpl32', - 'com.kenai.jffi.MemoryIO$UnsafeImpl64', - 'org.python.google.common.cache.Striped64', - 'org.python.google.common.cache.Striped64$1', - 'org.python.google.common.cache.Striped64$Cell', - 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.python.netty.util.internal.chmv8.ForkJoinPool$2', - 'org.python.netty.util.internal.PlatformDependent0', - 'org.python.netty.util.internal.UnsafeAtomicIntegerFieldUpdater', - 'org.python.netty.util.internal.UnsafeAtomicLongFieldUpdater', - 'org.python.netty.util.internal.UnsafeAtomicReferenceFieldUpdater', - 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8', - 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$1', - 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$TreeBin', - 'org.python.netty.util.internal.chmv8.CountedCompleter', - 
'org.python.netty.util.internal.chmv8.CountedCompleter$1', - 'org.python.netty.util.internal.chmv8.ForkJoinPool', - 'org.python.netty.util.internal.chmv8.ForkJoinPool$WorkQueue', - 'org.python.netty.util.internal.chmv8.ForkJoinTask', - 'org.python.netty.util.internal.chmv8.ForkJoinTask$1', + // uses internal java api: sun.misc.Unsafe + 'com.kenai.jffi.MemoryIO$UnsafeImpl', + 'com.kenai.jffi.MemoryIO$UnsafeImpl32', + 'com.kenai.jffi.MemoryIO$UnsafeImpl64', + 'org.python.google.common.cache.Striped64', + 'org.python.google.common.cache.Striped64$1', + 'org.python.google.common.cache.Striped64$Cell', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$2', + 'org.python.netty.util.internal.PlatformDependent0', + 'org.python.netty.util.internal.UnsafeAtomicIntegerFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicLongFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicReferenceFieldUpdater', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$1', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$TreeBin', + 'org.python.netty.util.internal.chmv8.CountedCompleter', + 'org.python.netty.util.internal.chmv8.CountedCompleter$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$WorkQueue', + 'org.python.netty.util.internal.chmv8.ForkJoinTask', + 'org.python.netty.util.internal.chmv8.ForkJoinTask$1', - // "uberjaring" (but not shading) classes that have been in the JDK since 1.5 - // nice job python. 
- 'javax.xml.XMLConstants',
- 'javax.xml.datatype.DatatypeConfigurationException',
- 'javax.xml.datatype.DatatypeConstants$1',
- 'javax.xml.datatype.DatatypeConstants$Field',
- 'javax.xml.datatype.DatatypeConstants',
- 'javax.xml.datatype.DatatypeFactory',
- 'javax.xml.datatype.Duration',
- 'javax.xml.datatype.FactoryFinder',
- 'javax.xml.datatype.SecuritySupport$1',
- 'javax.xml.datatype.SecuritySupport$2',
- 'javax.xml.datatype.SecuritySupport$3',
- 'javax.xml.datatype.SecuritySupport$4',
- 'javax.xml.datatype.SecuritySupport$5',
- 'javax.xml.datatype.SecuritySupport',
- 'javax.xml.datatype.XMLGregorianCalendar',
- 'javax.xml.namespace.NamespaceContext',
- 'javax.xml.namespace.QName$1',
- 'javax.xml.namespace.QName',
- 'javax.xml.parsers.DocumentBuilder',
- 'javax.xml.parsers.DocumentBuilderFactory',
- 'javax.xml.parsers.FactoryConfigurationError',
- 'javax.xml.parsers.FactoryFinder',
- 'javax.xml.parsers.ParserConfigurationException',
- 'javax.xml.parsers.SAXParser',
- 'javax.xml.parsers.SAXParserFactory',
- 'javax.xml.parsers.SecuritySupport$1',
- 'javax.xml.parsers.SecuritySupport$2',
- 'javax.xml.parsers.SecuritySupport$3',
- 'javax.xml.parsers.SecuritySupport$4',
- 'javax.xml.parsers.SecuritySupport$5',
- 'javax.xml.parsers.SecuritySupport',
- 'javax.xml.stream.EventFilter',
- 'javax.xml.stream.FactoryConfigurationError',
- 'javax.xml.stream.FactoryFinder',
- 'javax.xml.stream.Location',
- 'javax.xml.stream.SecuritySupport$1',
- 'javax.xml.stream.SecuritySupport$2',
- 'javax.xml.stream.SecuritySupport$3',
- 'javax.xml.stream.SecuritySupport$4',
- 'javax.xml.stream.SecuritySupport$5',
- 'javax.xml.stream.SecuritySupport',
- 'javax.xml.stream.StreamFilter',
- 'javax.xml.stream.XMLEventFactory',
- 'javax.xml.stream.XMLEventReader',
- 'javax.xml.stream.XMLEventWriter',
- 'javax.xml.stream.XMLInputFactory',
- 'javax.xml.stream.XMLOutputFactory',
- 'javax.xml.stream.XMLReporter',
- 'javax.xml.stream.XMLResolver',
- 'javax.xml.stream.XMLStreamConstants',
- 'javax.xml.stream.XMLStreamException',
- 'javax.xml.stream.XMLStreamReader',
- 'javax.xml.stream.XMLStreamWriter',
- 'javax.xml.stream.events.Attribute',
- 'javax.xml.stream.events.Characters',
- 'javax.xml.stream.events.Comment',
- 'javax.xml.stream.events.DTD',
- 'javax.xml.stream.events.EndDocument',
- 'javax.xml.stream.events.EndElement',
- 'javax.xml.stream.events.EntityDeclaration',
- 'javax.xml.stream.events.EntityReference',
- 'javax.xml.stream.events.Namespace',
- 'javax.xml.stream.events.NotationDeclaration',
- 'javax.xml.stream.events.ProcessingInstruction',
- 'javax.xml.stream.events.StartDocument',
- 'javax.xml.stream.events.StartElement',
- 'javax.xml.stream.events.XMLEvent',
- 'javax.xml.stream.util.EventReaderDelegate',
- 'javax.xml.stream.util.StreamReaderDelegate',
- 'javax.xml.stream.util.XMLEventAllocator',
- 'javax.xml.stream.util.XMLEventConsumer',
- 'javax.xml.transform.ErrorListener',
- 'javax.xml.transform.FactoryFinder',
- 'javax.xml.transform.OutputKeys',
- 'javax.xml.transform.Result',
- 'javax.xml.transform.SecuritySupport$1',
- 'javax.xml.transform.SecuritySupport$2',
- 'javax.xml.transform.SecuritySupport$3',
- 'javax.xml.transform.SecuritySupport$4',
- 'javax.xml.transform.SecuritySupport$5',
- 'javax.xml.transform.SecuritySupport',
- 'javax.xml.transform.Source',
- 'javax.xml.transform.SourceLocator',
- 'javax.xml.transform.Templates',
- 'javax.xml.transform.Transformer',
- 'javax.xml.transform.TransformerConfigurationException',
- 'javax.xml.transform.TransformerException',
- 'javax.xml.transform.TransformerFactory',
- 'javax.xml.transform.TransformerFactoryConfigurationError',
- 'javax.xml.transform.URIResolver',
- 'javax.xml.transform.dom.DOMLocator',
- 'javax.xml.transform.dom.DOMResult',
- 'javax.xml.transform.dom.DOMSource',
- 'javax.xml.transform.sax.SAXResult',
- 'javax.xml.transform.sax.SAXSource',
- 'javax.xml.transform.sax.SAXTransformerFactory',
- 'javax.xml.transform.sax.TemplatesHandler',
- 'javax.xml.transform.sax.TransformerHandler',
- 'javax.xml.transform.stax.StAXResult',
- 'javax.xml.transform.stax.StAXSource',
- 'javax.xml.transform.stream.StreamResult',
- 'javax.xml.transform.stream.StreamSource',
- 'javax.xml.validation.Schema',
- 'javax.xml.validation.SchemaFactory',
- 'javax.xml.validation.SchemaFactoryFinder$1',
- 'javax.xml.validation.SchemaFactoryFinder$2',
- 'javax.xml.validation.SchemaFactoryFinder',
- 'javax.xml.validation.SchemaFactoryLoader',
- 'javax.xml.validation.SecuritySupport$1',
- 'javax.xml.validation.SecuritySupport$2',
- 'javax.xml.validation.SecuritySupport$3',
- 'javax.xml.validation.SecuritySupport$4',
- 'javax.xml.validation.SecuritySupport$5',
- 'javax.xml.validation.SecuritySupport$6',
- 'javax.xml.validation.SecuritySupport$7',
- 'javax.xml.validation.SecuritySupport$8',
- 'javax.xml.validation.SecuritySupport',
- 'javax.xml.validation.TypeInfoProvider',
- 'javax.xml.validation.Validator',
- 'javax.xml.validation.ValidatorHandler',
- 'javax.xml.xpath.SecuritySupport$1',
- 'javax.xml.xpath.SecuritySupport$2',
- 'javax.xml.xpath.SecuritySupport$3',
- 'javax.xml.xpath.SecuritySupport$4',
- 'javax.xml.xpath.SecuritySupport$5',
- 'javax.xml.xpath.SecuritySupport$6',
- 'javax.xml.xpath.SecuritySupport$7',
- 'javax.xml.xpath.SecuritySupport$8',
- 'javax.xml.xpath.SecuritySupport',
- 'javax.xml.xpath.XPath',
- 'javax.xml.xpath.XPathConstants',
- 'javax.xml.xpath.XPathException',
- 'javax.xml.xpath.XPathExpression',
- 'javax.xml.xpath.XPathExpressionException',
- 'javax.xml.xpath.XPathFactory',
- 'javax.xml.xpath.XPathFactoryConfigurationException',
- 'javax.xml.xpath.XPathFactoryFinder$1',
- 'javax.xml.xpath.XPathFactoryFinder$2',
- 'javax.xml.xpath.XPathFactoryFinder',
- 'javax.xml.xpath.XPathFunction',
- 'javax.xml.xpath.XPathFunctionException',
- 'javax.xml.xpath.XPathFunctionResolver',
- 'javax.xml.xpath.XPathVariableResolver',
- 'org.w3c.dom.Attr',
- 'org.w3c.dom.CDATASection',
- 'org.w3c.dom.CharacterData',
- 'org.w3c.dom.Comment',
- 'org.w3c.dom.DOMConfiguration',
- 'org.w3c.dom.DOMError',
- 'org.w3c.dom.DOMErrorHandler',
- 'org.w3c.dom.DOMException',
- 'org.w3c.dom.DOMImplementation',
- 'org.w3c.dom.DOMImplementationList',
- 'org.w3c.dom.DOMImplementationSource',
- 'org.w3c.dom.DOMLocator',
- 'org.w3c.dom.DOMStringList',
- 'org.w3c.dom.Document',
- 'org.w3c.dom.DocumentFragment',
- 'org.w3c.dom.DocumentType',
- 'org.w3c.dom.Element',
- 'org.w3c.dom.Entity',
- 'org.w3c.dom.EntityReference',
- 'org.w3c.dom.NameList',
- 'org.w3c.dom.NamedNodeMap',
- 'org.w3c.dom.Node',
- 'org.w3c.dom.NodeList',
- 'org.w3c.dom.Notation',
- 'org.w3c.dom.ProcessingInstruction',
- 'org.w3c.dom.Text',
- 'org.w3c.dom.TypeInfo',
- 'org.w3c.dom.UserDataHandler',
- 'org.w3c.dom.bootstrap.DOMImplementationRegistry$1',
- 'org.w3c.dom.bootstrap.DOMImplementationRegistry$2',
- 'org.w3c.dom.bootstrap.DOMImplementationRegistry$3',
- 'org.w3c.dom.bootstrap.DOMImplementationRegistry$4',
- 'org.w3c.dom.bootstrap.DOMImplementationRegistry',
- 'org.w3c.dom.css.CSS2Properties',
- 'org.w3c.dom.css.CSSCharsetRule',
- 'org.w3c.dom.css.CSSFontFaceRule',
- 'org.w3c.dom.css.CSSImportRule',
- 'org.w3c.dom.css.CSSMediaRule',
- 'org.w3c.dom.css.CSSPageRule',
- 'org.w3c.dom.css.CSSPrimitiveValue',
- 'org.w3c.dom.css.CSSRule',
- 'org.w3c.dom.css.CSSRuleList',
- 'org.w3c.dom.css.CSSStyleDeclaration',
- 'org.w3c.dom.css.CSSStyleRule',
- 'org.w3c.dom.css.CSSStyleSheet',
- 'org.w3c.dom.css.CSSUnknownRule',
- 'org.w3c.dom.css.CSSValue',
- 'org.w3c.dom.css.CSSValueList',
- 'org.w3c.dom.css.Counter',
- 'org.w3c.dom.css.DOMImplementationCSS',
- 'org.w3c.dom.css.DocumentCSS',
- 'org.w3c.dom.css.ElementCSSInlineStyle',
- 'org.w3c.dom.css.RGBColor',
- 'org.w3c.dom.css.Rect',
- 'org.w3c.dom.css.ViewCSS',
- 'org.w3c.dom.events.DocumentEvent',
- 'org.w3c.dom.events.Event',
- 'org.w3c.dom.events.EventException',
- 'org.w3c.dom.events.EventListener',
- 'org.w3c.dom.events.EventTarget',
- 'org.w3c.dom.events.MouseEvent',
- 'org.w3c.dom.events.MutationEvent',
- 'org.w3c.dom.events.UIEvent',
- 'org.w3c.dom.html.HTMLAnchorElement',
- 'org.w3c.dom.html.HTMLAppletElement',
- 'org.w3c.dom.html.HTMLAreaElement',
- 'org.w3c.dom.html.HTMLBRElement',
- 'org.w3c.dom.html.HTMLBaseElement',
- 'org.w3c.dom.html.HTMLBaseFontElement',
- 'org.w3c.dom.html.HTMLBodyElement',
- 'org.w3c.dom.html.HTMLButtonElement',
- 'org.w3c.dom.html.HTMLCollection',
- 'org.w3c.dom.html.HTMLDListElement',
- 'org.w3c.dom.html.HTMLDOMImplementation',
- 'org.w3c.dom.html.HTMLDirectoryElement',
- 'org.w3c.dom.html.HTMLDivElement',
- 'org.w3c.dom.html.HTMLDocument',
- 'org.w3c.dom.html.HTMLElement',
- 'org.w3c.dom.html.HTMLFieldSetElement',
- 'org.w3c.dom.html.HTMLFontElement',
- 'org.w3c.dom.html.HTMLFormElement',
- 'org.w3c.dom.html.HTMLFrameElement',
- 'org.w3c.dom.html.HTMLFrameSetElement',
- 'org.w3c.dom.html.HTMLHRElement',
- 'org.w3c.dom.html.HTMLHeadElement',
- 'org.w3c.dom.html.HTMLHeadingElement',
- 'org.w3c.dom.html.HTMLHtmlElement',
- 'org.w3c.dom.html.HTMLIFrameElement',
- 'org.w3c.dom.html.HTMLImageElement',
- 'org.w3c.dom.html.HTMLInputElement',
- 'org.w3c.dom.html.HTMLIsIndexElement',
- 'org.w3c.dom.html.HTMLLIElement',
- 'org.w3c.dom.html.HTMLLabelElement',
- 'org.w3c.dom.html.HTMLLegendElement',
- 'org.w3c.dom.html.HTMLLinkElement',
- 'org.w3c.dom.html.HTMLMapElement',
- 'org.w3c.dom.html.HTMLMenuElement',
- 'org.w3c.dom.html.HTMLMetaElement',
- 'org.w3c.dom.html.HTMLModElement',
- 'org.w3c.dom.html.HTMLOListElement',
- 'org.w3c.dom.html.HTMLObjectElement',
- 'org.w3c.dom.html.HTMLOptGroupElement',
- 'org.w3c.dom.html.HTMLOptionElement',
- 'org.w3c.dom.html.HTMLParagraphElement',
- 'org.w3c.dom.html.HTMLParamElement',
- 'org.w3c.dom.html.HTMLPreElement',
- 'org.w3c.dom.html.HTMLQuoteElement',
- 'org.w3c.dom.html.HTMLScriptElement',
- 'org.w3c.dom.html.HTMLSelectElement',
- 'org.w3c.dom.html.HTMLStyleElement',
- 'org.w3c.dom.html.HTMLTableCaptionElement',
- 'org.w3c.dom.html.HTMLTableCellElement',
- 'org.w3c.dom.html.HTMLTableColElement',
- 'org.w3c.dom.html.HTMLTableElement',
- 'org.w3c.dom.html.HTMLTableRowElement',
- 'org.w3c.dom.html.HTMLTableSectionElement',
- 'org.w3c.dom.html.HTMLTextAreaElement',
- 'org.w3c.dom.html.HTMLTitleElement',
- 'org.w3c.dom.html.HTMLUListElement',
- 'org.w3c.dom.ls.DOMImplementationLS',
- 'org.w3c.dom.ls.LSException',
- 'org.w3c.dom.ls.LSInput',
- 'org.w3c.dom.ls.LSLoadEvent',
- 'org.w3c.dom.ls.LSOutput',
- 'org.w3c.dom.ls.LSParser',
- 'org.w3c.dom.ls.LSParserFilter',
- 'org.w3c.dom.ls.LSProgressEvent',
- 'org.w3c.dom.ls.LSResourceResolver',
- 'org.w3c.dom.ls.LSSerializer',
- 'org.w3c.dom.ls.LSSerializerFilter',
- 'org.w3c.dom.ranges.DocumentRange',
- 'org.w3c.dom.ranges.Range',
- 'org.w3c.dom.ranges.RangeException',
- 'org.w3c.dom.stylesheets.DocumentStyle',
- 'org.w3c.dom.stylesheets.LinkStyle',
- 'org.w3c.dom.stylesheets.MediaList',
- 'org.w3c.dom.stylesheets.StyleSheet',
- 'org.w3c.dom.stylesheets.StyleSheetList',
- 'org.w3c.dom.traversal.DocumentTraversal',
- 'org.w3c.dom.traversal.NodeFilter',
- 'org.w3c.dom.traversal.NodeIterator',
- 'org.w3c.dom.traversal.TreeWalker',
- 'org.w3c.dom.views.AbstractView',
- 'org.w3c.dom.views.DocumentView',
- 'org.w3c.dom.xpath.XPathEvaluator',
- 'org.w3c.dom.xpath.XPathException',
- 'org.w3c.dom.xpath.XPathExpression',
- 'org.w3c.dom.xpath.XPathNSResolver',
- 'org.w3c.dom.xpath.XPathNamespace',
- 'org.w3c.dom.xpath.XPathResult',
- 'org.xml.sax.AttributeList',
- 'org.xml.sax.Attributes',
- 'org.xml.sax.ContentHandler',
- 'org.xml.sax.DTDHandler',
- 'org.xml.sax.DocumentHandler',
- 'org.xml.sax.EntityResolver',
- 'org.xml.sax.ErrorHandler',
- 'org.xml.sax.HandlerBase',
- 'org.xml.sax.InputSource',
- 'org.xml.sax.Locator',
- 'org.xml.sax.Parser',
- 'org.xml.sax.SAXException',
- 'org.xml.sax.SAXNotRecognizedException',
- 'org.xml.sax.SAXNotSupportedException',
- 'org.xml.sax.SAXParseException',
- 'org.xml.sax.XMLFilter',
- 'org.xml.sax.XMLReader',
- 'org.xml.sax.ext.Attributes2',
- 'org.xml.sax.ext.Attributes2Impl',
- 'org.xml.sax.ext.DeclHandler',
- 'org.xml.sax.ext.DefaultHandler2',
- 'org.xml.sax.ext.EntityResolver2',
- 'org.xml.sax.ext.LexicalHandler',
- 'org.xml.sax.ext.Locator2',
- 'org.xml.sax.ext.Locator2Impl',
- 'org.xml.sax.helpers.AttributeListImpl',
- 'org.xml.sax.helpers.AttributesImpl',
- 'org.xml.sax.helpers.DefaultHandler',
- 'org.xml.sax.helpers.LocatorImpl',
- 'org.xml.sax.helpers.NamespaceSupport$Context',
- 'org.xml.sax.helpers.NamespaceSupport',
- 'org.xml.sax.helpers.NewInstance',
- 'org.xml.sax.helpers.ParserAdapter$AttributeListAdapter',
- 'org.xml.sax.helpers.ParserAdapter',
- 'org.xml.sax.helpers.ParserFactory',
- 'org.xml.sax.helpers.SecuritySupport$1',
- 'org.xml.sax.helpers.SecuritySupport$2',
- 'org.xml.sax.helpers.SecuritySupport$3',
- 'org.xml.sax.helpers.SecuritySupport$4',
- 'org.xml.sax.helpers.SecuritySupport',
- 'org.xml.sax.helpers.XMLFilterImpl',
- 'org.xml.sax.helpers.XMLReaderAdapter$AttributesAdapter',
- 'org.xml.sax.helpers.XMLReaderAdapter',
- 'org.xml.sax.helpers.XMLReaderFactory',
+ // "uberjaring" (but not shading) classes that have been in the JDK since 1.5
+ // nice job python.
+ 'javax.xml.XMLConstants',
+ 'javax.xml.datatype.DatatypeConfigurationException',
+ 'javax.xml.datatype.DatatypeConstants$1',
+ 'javax.xml.datatype.DatatypeConstants$Field',
+ 'javax.xml.datatype.DatatypeConstants',
+ 'javax.xml.datatype.DatatypeFactory',
+ 'javax.xml.datatype.Duration',
+ 'javax.xml.datatype.FactoryFinder',
+ 'javax.xml.datatype.SecuritySupport$1',
+ 'javax.xml.datatype.SecuritySupport$2',
+ 'javax.xml.datatype.SecuritySupport$3',
+ 'javax.xml.datatype.SecuritySupport$4',
+ 'javax.xml.datatype.SecuritySupport$5',
+ 'javax.xml.datatype.SecuritySupport',
+ 'javax.xml.datatype.XMLGregorianCalendar',
+ 'javax.xml.namespace.NamespaceContext',
+ 'javax.xml.namespace.QName$1',
+ 'javax.xml.namespace.QName',
+ 'javax.xml.parsers.DocumentBuilder',
+ 'javax.xml.parsers.DocumentBuilderFactory',
+ 'javax.xml.parsers.FactoryConfigurationError',
+ 'javax.xml.parsers.FactoryFinder',
+ 'javax.xml.parsers.ParserConfigurationException',
+ 'javax.xml.parsers.SAXParser',
+ 'javax.xml.parsers.SAXParserFactory',
+ 'javax.xml.parsers.SecuritySupport$1',
+ 'javax.xml.parsers.SecuritySupport$2',
+ 'javax.xml.parsers.SecuritySupport$3',
+ 'javax.xml.parsers.SecuritySupport$4',
+ 'javax.xml.parsers.SecuritySupport$5',
+ 'javax.xml.parsers.SecuritySupport',
+ 'javax.xml.stream.EventFilter',
+ 'javax.xml.stream.FactoryConfigurationError',
+ 'javax.xml.stream.FactoryFinder',
+ 'javax.xml.stream.Location',
+ 'javax.xml.stream.SecuritySupport$1',
+ 'javax.xml.stream.SecuritySupport$2',
+ 'javax.xml.stream.SecuritySupport$3',
+ 'javax.xml.stream.SecuritySupport$4',
+ 'javax.xml.stream.SecuritySupport$5',
+ 'javax.xml.stream.SecuritySupport',
+ 'javax.xml.stream.StreamFilter',
+ 'javax.xml.stream.XMLEventFactory',
+ 'javax.xml.stream.XMLEventReader',
+ 'javax.xml.stream.XMLEventWriter',
+ 'javax.xml.stream.XMLInputFactory',
+ 'javax.xml.stream.XMLOutputFactory',
+ 'javax.xml.stream.XMLReporter',
+ 'javax.xml.stream.XMLResolver',
+ 'javax.xml.stream.XMLStreamConstants',
+ 'javax.xml.stream.XMLStreamException',
+ 'javax.xml.stream.XMLStreamReader',
+ 'javax.xml.stream.XMLStreamWriter',
+ 'javax.xml.stream.events.Attribute',
+ 'javax.xml.stream.events.Characters',
+ 'javax.xml.stream.events.Comment',
+ 'javax.xml.stream.events.DTD',
+ 'javax.xml.stream.events.EndDocument',
+ 'javax.xml.stream.events.EndElement',
+ 'javax.xml.stream.events.EntityDeclaration',
+ 'javax.xml.stream.events.EntityReference',
+ 'javax.xml.stream.events.Namespace',
+ 'javax.xml.stream.events.NotationDeclaration',
+ 'javax.xml.stream.events.ProcessingInstruction',
+ 'javax.xml.stream.events.StartDocument',
+ 'javax.xml.stream.events.StartElement',
+ 'javax.xml.stream.events.XMLEvent',
+ 'javax.xml.stream.util.EventReaderDelegate',
+ 'javax.xml.stream.util.StreamReaderDelegate',
+ 'javax.xml.stream.util.XMLEventAllocator',
+ 'javax.xml.stream.util.XMLEventConsumer',
+ 'javax.xml.transform.ErrorListener',
+ 'javax.xml.transform.FactoryFinder',
+ 'javax.xml.transform.OutputKeys',
+ 'javax.xml.transform.Result',
+ 'javax.xml.transform.SecuritySupport$1',
+ 'javax.xml.transform.SecuritySupport$2',
+ 'javax.xml.transform.SecuritySupport$3',
+ 'javax.xml.transform.SecuritySupport$4',
+ 'javax.xml.transform.SecuritySupport$5',
+ 'javax.xml.transform.SecuritySupport',
+ 'javax.xml.transform.Source',
+ 'javax.xml.transform.SourceLocator',
+ 'javax.xml.transform.Templates',
+ 'javax.xml.transform.Transformer',
+ 'javax.xml.transform.TransformerConfigurationException',
+ 'javax.xml.transform.TransformerException',
+ 'javax.xml.transform.TransformerFactory',
+ 'javax.xml.transform.TransformerFactoryConfigurationError',
+ 'javax.xml.transform.URIResolver',
+ 'javax.xml.transform.dom.DOMLocator',
+ 'javax.xml.transform.dom.DOMResult',
+ 'javax.xml.transform.dom.DOMSource',
+ 'javax.xml.transform.sax.SAXResult',
+ 'javax.xml.transform.sax.SAXSource',
+ 'javax.xml.transform.sax.SAXTransformerFactory',
+ 'javax.xml.transform.sax.TemplatesHandler',
+ 'javax.xml.transform.sax.TransformerHandler',
+ 'javax.xml.transform.stax.StAXResult',
+ 'javax.xml.transform.stax.StAXSource',
+ 'javax.xml.transform.stream.StreamResult',
+ 'javax.xml.transform.stream.StreamSource',
+ 'javax.xml.validation.Schema',
+ 'javax.xml.validation.SchemaFactory',
+ 'javax.xml.validation.SchemaFactoryFinder$1',
+ 'javax.xml.validation.SchemaFactoryFinder$2',
+ 'javax.xml.validation.SchemaFactoryFinder',
+ 'javax.xml.validation.SchemaFactoryLoader',
+ 'javax.xml.validation.SecuritySupport$1',
+ 'javax.xml.validation.SecuritySupport$2',
+ 'javax.xml.validation.SecuritySupport$3',
+ 'javax.xml.validation.SecuritySupport$4',
+ 'javax.xml.validation.SecuritySupport$5',
+ 'javax.xml.validation.SecuritySupport$6',
+ 'javax.xml.validation.SecuritySupport$7',
+ 'javax.xml.validation.SecuritySupport$8',
+ 'javax.xml.validation.SecuritySupport',
+ 'javax.xml.validation.TypeInfoProvider',
+ 'javax.xml.validation.Validator',
+ 'javax.xml.validation.ValidatorHandler',
+ 'javax.xml.xpath.SecuritySupport$1',
+ 'javax.xml.xpath.SecuritySupport$2',
+ 'javax.xml.xpath.SecuritySupport$3',
+ 'javax.xml.xpath.SecuritySupport$4',
+ 'javax.xml.xpath.SecuritySupport$5',
+ 'javax.xml.xpath.SecuritySupport$6',
+ 'javax.xml.xpath.SecuritySupport$7',
+ 'javax.xml.xpath.SecuritySupport$8',
+ 'javax.xml.xpath.SecuritySupport',
+ 'javax.xml.xpath.XPath',
+ 'javax.xml.xpath.XPathConstants',
+ 'javax.xml.xpath.XPathException',
+ 'javax.xml.xpath.XPathExpression',
+ 'javax.xml.xpath.XPathExpressionException',
+ 'javax.xml.xpath.XPathFactory',
+ 'javax.xml.xpath.XPathFactoryConfigurationException',
+ 'javax.xml.xpath.XPathFactoryFinder$1',
+ 'javax.xml.xpath.XPathFactoryFinder$2',
+ 'javax.xml.xpath.XPathFactoryFinder',
+ 'javax.xml.xpath.XPathFunction',
+ 'javax.xml.xpath.XPathFunctionException',
+ 'javax.xml.xpath.XPathFunctionResolver',
+ 'javax.xml.xpath.XPathVariableResolver',
+ 'org.w3c.dom.Attr',
+ 'org.w3c.dom.CDATASection',
+ 'org.w3c.dom.CharacterData',
+ 'org.w3c.dom.Comment',
+ 'org.w3c.dom.DOMConfiguration',
+ 'org.w3c.dom.DOMError',
+ 'org.w3c.dom.DOMErrorHandler',
+ 'org.w3c.dom.DOMException',
+ 'org.w3c.dom.DOMImplementation',
+ 'org.w3c.dom.DOMImplementationList',
+ 'org.w3c.dom.DOMImplementationSource',
+ 'org.w3c.dom.DOMLocator',
+ 'org.w3c.dom.DOMStringList',
+ 'org.w3c.dom.Document',
+ 'org.w3c.dom.DocumentFragment',
+ 'org.w3c.dom.DocumentType',
+ 'org.w3c.dom.Element',
+ 'org.w3c.dom.Entity',
+ 'org.w3c.dom.EntityReference',
+ 'org.w3c.dom.NameList',
+ 'org.w3c.dom.NamedNodeMap',
+ 'org.w3c.dom.Node',
+ 'org.w3c.dom.NodeList',
+ 'org.w3c.dom.Notation',
+ 'org.w3c.dom.ProcessingInstruction',
+ 'org.w3c.dom.Text',
+ 'org.w3c.dom.TypeInfo',
+ 'org.w3c.dom.UserDataHandler',
+ 'org.w3c.dom.bootstrap.DOMImplementationRegistry$1',
+ 'org.w3c.dom.bootstrap.DOMImplementationRegistry$2',
+ 'org.w3c.dom.bootstrap.DOMImplementationRegistry$3',
+ 'org.w3c.dom.bootstrap.DOMImplementationRegistry$4',
+ 'org.w3c.dom.bootstrap.DOMImplementationRegistry',
+ 'org.w3c.dom.css.CSS2Properties',
+ 'org.w3c.dom.css.CSSCharsetRule',
+ 'org.w3c.dom.css.CSSFontFaceRule',
+ 'org.w3c.dom.css.CSSImportRule',
+ 'org.w3c.dom.css.CSSMediaRule',
+ 'org.w3c.dom.css.CSSPageRule',
+ 'org.w3c.dom.css.CSSPrimitiveValue',
+ 'org.w3c.dom.css.CSSRule',
+ 'org.w3c.dom.css.CSSRuleList',
+ 'org.w3c.dom.css.CSSStyleDeclaration',
+ 'org.w3c.dom.css.CSSStyleRule',
+ 'org.w3c.dom.css.CSSStyleSheet',
+ 'org.w3c.dom.css.CSSUnknownRule',
+ 'org.w3c.dom.css.CSSValue',
+ 'org.w3c.dom.css.CSSValueList',
+ 'org.w3c.dom.css.Counter',
+ 'org.w3c.dom.css.DOMImplementationCSS',
+ 'org.w3c.dom.css.DocumentCSS',
+ 'org.w3c.dom.css.ElementCSSInlineStyle',
+ 'org.w3c.dom.css.RGBColor',
+ 'org.w3c.dom.css.Rect',
+ 'org.w3c.dom.css.ViewCSS',
+ 'org.w3c.dom.events.DocumentEvent',
+ 'org.w3c.dom.events.Event',
+ 'org.w3c.dom.events.EventException',
+ 'org.w3c.dom.events.EventListener',
+ 'org.w3c.dom.events.EventTarget',
+ 'org.w3c.dom.events.MouseEvent',
+ 'org.w3c.dom.events.MutationEvent',
+ 'org.w3c.dom.events.UIEvent',
+ 'org.w3c.dom.html.HTMLAnchorElement',
+ 'org.w3c.dom.html.HTMLAppletElement',
+ 'org.w3c.dom.html.HTMLAreaElement',
+ 'org.w3c.dom.html.HTMLBRElement',
+ 'org.w3c.dom.html.HTMLBaseElement',
+ 'org.w3c.dom.html.HTMLBaseFontElement',
+ 'org.w3c.dom.html.HTMLBodyElement',
+ 'org.w3c.dom.html.HTMLButtonElement',
+ 'org.w3c.dom.html.HTMLCollection',
+ 'org.w3c.dom.html.HTMLDListElement',
+ 'org.w3c.dom.html.HTMLDOMImplementation',
+ 'org.w3c.dom.html.HTMLDirectoryElement',
+ 'org.w3c.dom.html.HTMLDivElement',
+ 'org.w3c.dom.html.HTMLDocument',
+ 'org.w3c.dom.html.HTMLElement',
+ 'org.w3c.dom.html.HTMLFieldSetElement',
+ 'org.w3c.dom.html.HTMLFontElement',
+ 'org.w3c.dom.html.HTMLFormElement',
+ 'org.w3c.dom.html.HTMLFrameElement',
+ 'org.w3c.dom.html.HTMLFrameSetElement',
+ 'org.w3c.dom.html.HTMLHRElement',
+ 'org.w3c.dom.html.HTMLHeadElement',
+ 'org.w3c.dom.html.HTMLHeadingElement',
+ 'org.w3c.dom.html.HTMLHtmlElement',
+ 'org.w3c.dom.html.HTMLIFrameElement',
+ 'org.w3c.dom.html.HTMLImageElement',
+ 'org.w3c.dom.html.HTMLInputElement',
+ 'org.w3c.dom.html.HTMLIsIndexElement',
+ 'org.w3c.dom.html.HTMLLIElement',
+ 'org.w3c.dom.html.HTMLLabelElement',
+ 'org.w3c.dom.html.HTMLLegendElement',
+ 'org.w3c.dom.html.HTMLLinkElement',
+ 'org.w3c.dom.html.HTMLMapElement',
+ 'org.w3c.dom.html.HTMLMenuElement',
+ 'org.w3c.dom.html.HTMLMetaElement',
+ 'org.w3c.dom.html.HTMLModElement',
+ 'org.w3c.dom.html.HTMLOListElement',
+ 'org.w3c.dom.html.HTMLObjectElement',
+ 'org.w3c.dom.html.HTMLOptGroupElement',
+ 'org.w3c.dom.html.HTMLOptionElement',
+ 'org.w3c.dom.html.HTMLParagraphElement',
+ 'org.w3c.dom.html.HTMLParamElement',
+ 'org.w3c.dom.html.HTMLPreElement',
+ 'org.w3c.dom.html.HTMLQuoteElement',
+ 'org.w3c.dom.html.HTMLScriptElement',
+ 'org.w3c.dom.html.HTMLSelectElement',
+ 'org.w3c.dom.html.HTMLStyleElement',
+ 'org.w3c.dom.html.HTMLTableCaptionElement',
+ 'org.w3c.dom.html.HTMLTableCellElement',
+ 'org.w3c.dom.html.HTMLTableColElement',
+ 'org.w3c.dom.html.HTMLTableElement',
+ 'org.w3c.dom.html.HTMLTableRowElement',
+ 'org.w3c.dom.html.HTMLTableSectionElement',
+ 'org.w3c.dom.html.HTMLTextAreaElement',
+ 'org.w3c.dom.html.HTMLTitleElement',
+ 'org.w3c.dom.html.HTMLUListElement',
+ 'org.w3c.dom.ls.DOMImplementationLS',
+ 'org.w3c.dom.ls.LSException',
+ 'org.w3c.dom.ls.LSInput',
+ 'org.w3c.dom.ls.LSLoadEvent',
+ 'org.w3c.dom.ls.LSOutput',
+ 'org.w3c.dom.ls.LSParser',
+ 'org.w3c.dom.ls.LSParserFilter',
+ 'org.w3c.dom.ls.LSProgressEvent',
+ 'org.w3c.dom.ls.LSResourceResolver',
+ 'org.w3c.dom.ls.LSSerializer',
+ 'org.w3c.dom.ls.LSSerializerFilter',
+ 'org.w3c.dom.ranges.DocumentRange',
+ 'org.w3c.dom.ranges.Range',
+ 'org.w3c.dom.ranges.RangeException',
+ 'org.w3c.dom.stylesheets.DocumentStyle',
+ 'org.w3c.dom.stylesheets.LinkStyle',
+ 'org.w3c.dom.stylesheets.MediaList',
+ 'org.w3c.dom.stylesheets.StyleSheet',
+ 'org.w3c.dom.stylesheets.StyleSheetList',
+ 'org.w3c.dom.traversal.DocumentTraversal',
+ 'org.w3c.dom.traversal.NodeFilter',
+ 'org.w3c.dom.traversal.NodeIterator',
+ 'org.w3c.dom.traversal.TreeWalker',
+ 'org.w3c.dom.views.AbstractView',
+ 'org.w3c.dom.views.DocumentView',
+ 'org.w3c.dom.xpath.XPathEvaluator',
+ 'org.w3c.dom.xpath.XPathException',
+ 'org.w3c.dom.xpath.XPathExpression',
+ 'org.w3c.dom.xpath.XPathNSResolver',
+ 'org.w3c.dom.xpath.XPathNamespace',
+ 'org.w3c.dom.xpath.XPathResult',
+ 'org.xml.sax.AttributeList',
+ 'org.xml.sax.Attributes',
+ 'org.xml.sax.ContentHandler',
+ 'org.xml.sax.DTDHandler',
+ 'org.xml.sax.DocumentHandler',
+ 'org.xml.sax.EntityResolver',
+ 'org.xml.sax.ErrorHandler',
+ 'org.xml.sax.HandlerBase',
+ 'org.xml.sax.InputSource',
+ 'org.xml.sax.Locator',
+ 'org.xml.sax.Parser',
+ 'org.xml.sax.SAXException',
+ 'org.xml.sax.SAXNotRecognizedException',
+ 'org.xml.sax.SAXNotSupportedException',
+ 'org.xml.sax.SAXParseException',
+ 'org.xml.sax.XMLFilter',
+ 'org.xml.sax.XMLReader',
+ 'org.xml.sax.ext.Attributes2',
+ 'org.xml.sax.ext.Attributes2Impl',
+ 'org.xml.sax.ext.DeclHandler',
+ 'org.xml.sax.ext.DefaultHandler2',
+ 'org.xml.sax.ext.EntityResolver2',
+ 'org.xml.sax.ext.LexicalHandler',
+ 'org.xml.sax.ext.Locator2',
+ 'org.xml.sax.ext.Locator2Impl',
+ 'org.xml.sax.helpers.AttributeListImpl',
+ 'org.xml.sax.helpers.AttributesImpl',
+ 'org.xml.sax.helpers.DefaultHandler',
+ 'org.xml.sax.helpers.LocatorImpl',
+ 'org.xml.sax.helpers.NamespaceSupport$Context',
+ 'org.xml.sax.helpers.NamespaceSupport',
+ 'org.xml.sax.helpers.NewInstance',
+ 'org.xml.sax.helpers.ParserAdapter$AttributeListAdapter',
+ 'org.xml.sax.helpers.ParserAdapter',
+ 'org.xml.sax.helpers.ParserFactory',
+ 'org.xml.sax.helpers.SecuritySupport$1',
+ 'org.xml.sax.helpers.SecuritySupport$2',
+ 'org.xml.sax.helpers.SecuritySupport$3',
+ 'org.xml.sax.helpers.SecuritySupport$4',
+ 'org.xml.sax.helpers.SecuritySupport',
+ 'org.xml.sax.helpers.XMLFilterImpl',
+ 'org.xml.sax.helpers.XMLReaderAdapter$AttributesAdapter',
+ 'org.xml.sax.helpers.XMLReaderAdapter',
+ 'org.xml.sax.helpers.XMLReaderFactory',
+
+ // classes are missing
+ 'com.jcraft.jzlib.Deflater',
+ 'com.jcraft.jzlib.Inflater',
+ 'com.jcraft.jzlib.JZlib$WrapperType',
+ 'com.jcraft.jzlib.JZlib',
+ 'javassist.ClassClassPath',
+ 'javassist.ClassPath',
+ 'javassist.ClassPool',
+ 'javassist.CtClass',
+ 'javassist.CtMethod',
+ 'javax.servlet.Filter',
+ 'javax.servlet.FilterChain',
+ 'javax.servlet.FilterConfig',
+ 'javax.servlet.ServletConfig',
+ 'javax.servlet.ServletContext',
+ 'javax.servlet.ServletContextEvent',
+ 'javax.servlet.ServletContextListener',
+ 'javax.servlet.ServletException',
+ 'javax.servlet.ServletRequest',
+ 'javax.servlet.ServletResponse',
+ 'javax.servlet.http.HttpServlet',
+ 'javax.servlet.http.HttpServletRequest',
+ 'javax.servlet.http.HttpServletResponse',
+ 'jnr.x86asm.Asm',
+ 'jnr.x86asm.Assembler',
+ 'jnr.x86asm.CPU',
+ 'jnr.x86asm.Mem',
+ 'jnr.x86asm.Register',
+ 'junit.framework.Assert',
+ 'junit.framework.TestCase',
+ 'org.antlr.stringtemplate.StringTemplate',
+ 'org.eclipse.jetty.alpn.ALPN$ClientProvider',
+ 'org.eclipse.jetty.alpn.ALPN$ServerProvider',
+ 'org.eclipse.jetty.alpn.ALPN',
+ 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
+ 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
+ 'org.eclipse.jetty.npn.NextProtoNego',
+ 'org.jboss.marshalling.ByteInput',
+ 'org.jboss.marshalling.ByteOutput',
+ 'org.jboss.marshalling.Marshaller',
+ 'org.jboss.marshalling.MarshallerFactory',
+ 'org.jboss.marshalling.MarshallingConfiguration',
+ 'org.jboss.marshalling.Unmarshaller',
+ 'org.junit.Assert',
+ 'org.junit.internal.matchers.CombinableMatcher',
+ 'org.junit.matchers.JUnitMatchers',
+ 'org.junit.runner.JUnitCore',
+ 'org.python.apache.commons.logging.Log',
+ 'org.python.apache.commons.logging.LogFactory',
+ 'org.python.apache.log4j.Level',
+ 'org.python.apache.log4j.Logger',
+ 'org.python.apache.tomcat.jni.Buffer',
+ 'org.python.apache.tomcat.jni.CertificateVerifier',
+ 'org.python.apache.tomcat.jni.Library',
+ 'org.python.apache.tomcat.jni.Pool',
+ 'org.python.apache.tomcat.jni.SSL',
+ 'org.python.apache.tomcat.jni.SSLContext',
+ 'org.python.apache.tools.ant.BuildException',
+ 'org.python.apache.tools.ant.DirectoryScanner',
+ 'org.python.apache.tools.ant.Project',
+ 'org.python.apache.tools.ant.taskdefs.Execute',
+ 'org.python.apache.tools.ant.taskdefs.Java',
+ 'org.python.apache.tools.ant.taskdefs.MatchingTask',
+ 'org.python.apache.tools.ant.types.Commandline$Argument',
+ 'org.python.apache.tools.ant.types.Path',
+ 'org.python.apache.tools.ant.types.Resource',
+ 'org.python.apache.tools.ant.types.ResourceCollection',
+ 'org.python.apache.tools.ant.types.resources.BaseResourceCollectionContainer',
+ 'org.python.apache.tools.ant.util.GlobPatternMapper',
+ 'org.python.apache.tools.ant.util.SourceFileScanner',
+ 'org.python.apache.xml.resolver.Catalog',
+ 'org.python.apache.xml.resolver.CatalogManager',
+ 'org.python.apache.xml.resolver.readers.SAXCatalogReader',
+ 'org.python.google.protobuf.CodedInputStream',
+ 'org.python.google.protobuf.CodedOutputStream',
+ 'org.python.google.protobuf.ExtensionRegistry',
+ 'org.python.google.protobuf.ExtensionRegistryLite',
+ 'org.python.google.protobuf.MessageLite$Builder',
+ 'org.python.google.protobuf.MessageLite',
+ 'org.python.google.protobuf.MessageLiteOrBuilder',
+ 'org.python.google.protobuf.Parser',
+ 'org.python.objectweb.asm.tree.AbstractInsnNode',
+ 'org.python.objectweb.asm.tree.ClassNode',
+ 'org.python.objectweb.asm.tree.InsnList',
+ 'org.python.objectweb.asm.tree.InsnNode',
+ 'org.python.objectweb.asm.tree.JumpInsnNode',
+ 'org.python.objectweb.asm.tree.LabelNode',
+ 'org.python.objectweb.asm.tree.LocalVariableNode',
+ 'org.python.objectweb.asm.tree.LookupSwitchInsnNode',
+ 'org.python.objectweb.asm.tree.MethodNode',
+ 'org.python.objectweb.asm.tree.TableSwitchInsnNode',
+ 'org.python.objectweb.asm.tree.TryCatchBlockNode',
+ 'org.python.objectweb.asm.tree.analysis.Analyzer',
+ 'org.python.objectweb.asm.tree.analysis.BasicValue',
+ 'org.python.objectweb.asm.tree.analysis.BasicVerifier',
+ 'org.python.objectweb.asm.tree.analysis.Frame',
+ 'org.python.objectweb.asm.tree.analysis.SimpleVerifier',
+ 'org.tukaani.xz.ARMOptions',
+ 'org.tukaani.xz.ARMThumbOptions',
+ 'org.tukaani.xz.DeltaOptions',
+ 'org.tukaani.xz.FilterOptions',
+ 'org.tukaani.xz.FinishableWrapperOutputStream',
+ 'org.tukaani.xz.IA64Options',
+ 'org.tukaani.xz.LZMA2InputStream',
+ 'org.tukaani.xz.LZMA2Options',
+ 'org.tukaani.xz.LZMAInputStream',
+ 'org.tukaani.xz.PowerPCOptions',
+ 'org.tukaani.xz.SPARCOptions',
+ 'org.tukaani.xz.SingleXZInputStream',
+ 'org.tukaani.xz.UnsupportedOptionsException',
+ 'org.tukaani.xz.X86Options',
+ 'org.tukaani.xz.XZ',
+ 'org.tukaani.xz.XZInputStream',
+ 'org.tukaani.xz.XZOutputStream',
]
diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle
index 58f2dceb740..bbe89aa1fd4 100644
--- a/plugins/mapper-attachments/build.gradle
+++ b/plugins/mapper-attachments/build.gradle
@@ -69,9 +69,1928 @@ forbiddenPatterns {
  exclude '**/*.epub'
}
-// classes are missing, e.g. org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList
-thirdPartyAudit.missingClasses = true
thirdPartyAudit.excludes = [
- // uses internal java api: com.sun.syndication (SyndFeedInput, SyndFeed, SyndEntry, SyndContent)
- 'org.apache.tika.parser.feed.FeedParser',
+ // classes are missing: some due to our whitelisting of parsers
+ 'com.coremedia.iso.IsoFile',
+ 'com.coremedia.iso.boxes.Box',
+ 'com.coremedia.iso.boxes.Container',
+ 'com.coremedia.iso.boxes.FileTypeBox',
+ 'com.coremedia.iso.boxes.MetaBox',
+ 'com.coremedia.iso.boxes.MovieBox',
+ 'com.coremedia.iso.boxes.MovieHeaderBox',
+ 'com.coremedia.iso.boxes.SampleTableBox',
+ 'com.coremedia.iso.boxes.TrackBox',
+ 'com.coremedia.iso.boxes.TrackHeaderBox',
+ 'com.coremedia.iso.boxes.UserDataBox',
+ 'com.coremedia.iso.boxes.apple.AppleItemListBox',
+ 'com.coremedia.iso.boxes.sampleentry.AudioSampleEntry',
+ 'com.drew.imaging.jpeg.JpegMetadataReader',
+ 'com.drew.imaging.tiff.TiffMetadataReader',
+ 'com.drew.imaging.webp.WebpMetadataReader',
+ 'com.drew.lang.ByteArrayReader',
+ 'com.drew.lang.GeoLocation',
+ 'com.drew.lang.Rational',
+ 'com.drew.metadata.Directory',
+ 'com.drew.metadata.Metadata',
+ 'com.drew.metadata.Tag',
+ 'com.drew.metadata.exif.ExifIFD0Directory',
+ 'com.drew.metadata.exif.ExifReader',
+ 'com.drew.metadata.exif.ExifSubIFDDirectory',
+ 'com.drew.metadata.exif.ExifThumbnailDirectory',
+ 'com.drew.metadata.exif.GpsDirectory',
+ 'com.drew.metadata.iptc.IptcDirectory',
+ 'com.drew.metadata.jpeg.JpegCommentDirectory',
+ 'com.drew.metadata.jpeg.JpegDirectory',
+ 'com.drew.metadata.xmp.XmpReader',
+ 'com.github.junrar.Archive',
+ 'com.github.junrar.rarfile.FileHeader',
+ 'com.googlecode.mp4parser.DataSource',
+ 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box',
+ 'com.googlecode.mp4parser.boxes.apple.AppleArtistBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleCommentBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleCompilationBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleDiskNumberBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleEncoderBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleGenreBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleNameBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleRecordingYear2Box',
+ 'com.googlecode.mp4parser.boxes.apple.AppleTrackAuthorBox',
+ 'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox',
+ 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox',
+ 'com.googlecode.mp4parser.util.CastUtils',
+ 'com.healthmarketscience.jackcess.Column',
+ 'com.healthmarketscience.jackcess.CryptCodecProvider',
+ 'com.healthmarketscience.jackcess.DataType',
+ 'com.healthmarketscience.jackcess.Database',
+ 'com.healthmarketscience.jackcess.DatabaseBuilder',
+ 'com.healthmarketscience.jackcess.PropertyMap$Property',
+ 'com.healthmarketscience.jackcess.PropertyMap',
+ 'com.healthmarketscience.jackcess.Row',
+ 'com.healthmarketscience.jackcess.Table',
+ 'com.healthmarketscience.jackcess.query.Query',
+ 'com.healthmarketscience.jackcess.util.LinkResolver',
+ 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent',
+ 'com.healthmarketscience.jackcess.util.OleBlob$Content',
+ 'com.healthmarketscience.jackcess.util.OleBlob$ContentType',
+ 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent',
+ 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent',
+ 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent',
+ 'com.healthmarketscience.jackcess.util.OleBlob',
+ 'com.healthmarketscience.jackcess.util.TableIterableBuilder',
+ 'com.ibm.icu.text.Bidi',
+ 'com.ibm.icu.text.Normalizer',
+ 'com.jmatio.io.MatFileHeader',
+ 'com.jmatio.io.MatFileReader',
+ 'com.jmatio.types.MLArray',
+ 'com.jmatio.types.MLStructure',
+ 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger',
+ 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments',
+ 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl',
+ 'com.microsoft.schemas.office.x2006.digsig.STSignatureText',
+ 'com.microsoft.schemas.office.x2006.digsig.STSignatureType',
+ 'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces',
+ 'com.microsoft.schemas.office.x2006.digsig.STVersion',
+ 'com.pff.PSTAttachment',
+ 'com.pff.PSTFile',
+ 'com.pff.PSTFolder',
+ 'com.pff.PSTMessage',
+ 'com.sun.syndication.feed.synd.SyndContent',
+ 'com.sun.syndication.feed.synd.SyndEntry',
+ 'com.sun.syndication.feed.synd.SyndFeed',
+ 'com.sun.syndication.io.SyndFeedInput',
+ 'com.uwyn.jhighlight.renderer.Renderer',
+ 'com.uwyn.jhighlight.renderer.XhtmlRendererFactory',
+ 'de.l3s.boilerpipe.BoilerpipeExtractor',
+ 'de.l3s.boilerpipe.document.TextBlock',
+ 'de.l3s.boilerpipe.document.TextDocument',
+ 'de.l3s.boilerpipe.extractors.DefaultExtractor',
+ 'de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler',
+ 'javax.mail.BodyPart',
+ 'javax.mail.Header',
+ 'javax.mail.Message$RecipientType',
+ 'javax.mail.MessagingException',
+ 'javax.mail.Multipart',
+ 'javax.mail.Part',
+ 'javax.mail.Session',
+ 'javax.mail.Transport',
+ 'javax.mail.internet.ContentType',
+ 'javax.mail.internet.InternetAddress',
+ 'javax.mail.internet.InternetHeaders',
+ 'javax.mail.internet.MimeBodyPart',
+ 'javax.mail.internet.MimeMessage',
+ 'javax.mail.internet.MimeMultipart',
+ 'javax.mail.internet.MimePart',
+ 'javax.mail.internet.SharedInputStream',
+ 'javax.servlet.ServletContextEvent',
+ 'javax.servlet.ServletContextListener',
+ 'javax.ws.rs.core.Response',
+ 'junit.framework.TestCase',
+ 'opennlp.tools.namefind.NameFinderME',
+ 'opennlp.tools.namefind.TokenNameFinderModel',
+ 'opennlp.tools.util.Span',
+ 'org.apache.avalon.framework.logger.Logger',
+ 'org.apache.commons.csv.CSVFormat',
+ 'org.apache.commons.csv.CSVParser',
+ 'org.apache.commons.csv.CSVRecord',
+ 'org.apache.commons.exec.CommandLine',
+ 'org.apache.commons.exec.DefaultExecutor',
+ 'org.apache.commons.exec.ExecuteWatchdog',
+ 'org.apache.commons.exec.PumpStreamHandler',
+ 'org.apache.commons.exec.environment.EnvironmentUtils',
+ 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept',
+ 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation',
+ 'org.apache.cxf.jaxrs.client.WebClient',
+ 'org.apache.cxf.jaxrs.ext.multipart.Attachment',
+ 'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition',
+ 'org.apache.cxf.jaxrs.ext.multipart.MultipartBody',
+ 'org.apache.james.mime4j.MimeException',
+ 'org.apache.james.mime4j.codec.DecodeMonitor',
+ 'org.apache.james.mime4j.codec.DecoderUtil',
+ 'org.apache.james.mime4j.dom.FieldParser',
+ 'org.apache.james.mime4j.dom.address.Address',
+ 'org.apache.james.mime4j.dom.address.AddressList',
+ 'org.apache.james.mime4j.dom.address.Mailbox',
+ 'org.apache.james.mime4j.dom.address.MailboxList',
+ 'org.apache.james.mime4j.dom.field.AddressListField',
+ 'org.apache.james.mime4j.dom.field.DateTimeField',
+ 'org.apache.james.mime4j.dom.field.MailboxListField',
+ 'org.apache.james.mime4j.dom.field.ParsedField',
+ 'org.apache.james.mime4j.dom.field.UnstructuredField',
+ 'org.apache.james.mime4j.field.LenientFieldParser',
+ 'org.apache.james.mime4j.parser.ContentHandler',
+ 'org.apache.james.mime4j.parser.MimeStreamParser',
+ 'org.apache.james.mime4j.stream.BodyDescriptor',
+ 'org.apache.james.mime4j.stream.Field',
+ 'org.apache.james.mime4j.stream.MimeConfig',
+ 'org.apache.jcp.xml.dsig.internal.dom.DOMDigestMethod',
+ 'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo',
+ 'org.apache.jcp.xml.dsig.internal.dom.DOMReference',
+ 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo',
+ 'org.apache.log.Hierarchy',
+ 'org.apache.log.Logger',
+ 'org.apache.sis.internal.util.CheckedArrayList',
+ 'org.apache.sis.internal.util.CheckedHashSet',
+ 'org.apache.sis.metadata.iso.DefaultMetadata',
+ 'org.apache.sis.metadata.iso.DefaultMetadataScope',
+ 'org.apache.sis.metadata.iso.constraint.DefaultLegalConstraints',
+ 'org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox',
+ 'org.apache.sis.metadata.iso.extent.DefaultGeographicDescription',
+ 'org.apache.sis.metadata.iso.identification.DefaultDataIdentification',
+ 'org.apache.sis.storage.DataStore',
+ 'org.apache.sis.storage.DataStores',
+ 'org.apache.sis.util.collection.CodeListSet',
+ 'org.apache.tools.ant.BuildException',
+ 'org.apache.tools.ant.FileScanner',
+ 'org.apache.tools.ant.Project',
+ 'org.apache.tools.ant.taskdefs.Jar',
+ 'org.apache.tools.ant.taskdefs.Javac',
+ 'org.apache.tools.ant.taskdefs.MatchingTask',
+ 'org.apache.tools.ant.types.FileSet',
+ 'org.apache.tools.ant.types.Path$PathElement',
+ 'org.apache.tools.ant.types.Path',
+ 'org.apache.tools.ant.types.Reference',
+ 'org.apache.uima.UIMAFramework',
+ 'org.apache.uima.analysis_engine.AnalysisEngine',
+ 'org.apache.uima.cas.Type',
+ 'org.apache.uima.cas.impl.XCASSerializer',
+ 'org.apache.uima.cas.impl.XmiCasSerializer',
+ 'org.apache.uima.cas.impl.XmiSerializationSharedData',
+ 'org.apache.uima.fit.util.JCasUtil',
+ 'org.apache.uima.jcas.JCas',
+ 'org.apache.uima.jcas.cas.FSArray',
+ 'org.apache.uima.util.XMLInputSource',
+ 'org.apache.uima.util.XMLParser',
+ 'org.apache.uima.util.XmlCasSerializer',
+ 'org.apache.xml.security.Init',
+ 'org.apache.xml.security.c14n.Canonicalizer',
+ 'org.apache.xml.security.utils.Base64',
+ 'org.bouncycastle.asn1.DERObject',
+ 'org.etsi.uri.x01903.v13.AnyType',
+ 'org.etsi.uri.x01903.v13.ClaimedRolesListType',
+ 'org.etsi.uri.x01903.v13.CounterSignatureType',
+ 'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory',
+ 'org.etsi.uri.x01903.v13.DataObjectFormatType',
+ 'org.etsi.uri.x01903.v13.IdentifierType',
+ 'org.etsi.uri.x01903.v13.IncludeType',
+ 'org.etsi.uri.x01903.v13.ObjectIdentifierType',
+ 'org.etsi.uri.x01903.v13.OtherCertStatusRefsType',
+ 'org.etsi.uri.x01903.v13.OtherCertStatusValuesType',
+ 'org.etsi.uri.x01903.v13.ReferenceInfoType',
+ 'org.etsi.uri.x01903.v13.SigPolicyQualifiersListType',
+ 'org.etsi.uri.x01903.v13.SignaturePolicyIdType',
+ 'org.etsi.uri.x01903.v13.SignatureProductionPlaceType',
+ 'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType',
+ 'org.etsi.uri.x01903.v13.SignerRoleType',
+ 'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType',
+ 'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList',
+ 'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList',
+ 'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList',
+ 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1EncapsulatedX509CertificateList',
+ 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1OtherCertificateList',
+ 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1EncapsulatedTimeStampList',
+ 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1IncludeList',
+ 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1ReferenceInfoList',
+ 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1XMLTimeStampList',
+ 'org.etsi.uri.x01903.v13.impl.OCSPRefsTypeImpl$1OCSPRefList',
+ 'org.etsi.uri.x01903.v13.impl.OCSPValuesTypeImpl$1EncapsulatedOCSPValueList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1ArchiveTimeStampList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttrAuthoritiesCertValuesList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeCertificateRefsList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationRefsList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationValuesList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CertificateValuesList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteCertificateRefsList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteRevocationRefsList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CounterSignatureList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RefsOnlyTimeStampList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RevocationValuesList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SigAndRefsTimeStampList',
+ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList',
+ 'org.etsi.uri.x01903.v14.ValidationDataType$Factory',
+ 'org.etsi.uri.x01903.v14.ValidationDataType',
+ 'org.json.JSONArray',
+ 'org.json.JSONObject',
+ 'org.json.XML',
+ 'org.json.simple.JSONArray',
+ 'org.json.simple.JSONObject',
+ 'org.json.simple.JSONValue',
+ 'org.junit.Test',
+ 'org.junit.internal.TextListener',
+ 'org.junit.runner.JUnitCore',
+ 'org.junit.runner.Result',
+ 'org.objectweb.asm.AnnotationVisitor',
+ 'org.objectweb.asm.Attribute',
+ 'org.objectweb.asm.ClassReader',
+ 'org.objectweb.asm.ClassVisitor',
+ 'org.objectweb.asm.FieldVisitor',
+ 'org.objectweb.asm.MethodVisitor',
+ 'org.objectweb.asm.Type',
+ 'org.opengis.metadata.Identifier',
+ 'org.opengis.metadata.citation.Address',
+ 'org.opengis.metadata.citation.Citation',
+ 'org.opengis.metadata.citation.CitationDate',
+ 'org.opengis.metadata.citation.Contact',
+ 'org.opengis.metadata.citation.DateType',
+ 'org.opengis.metadata.citation.OnLineFunction',
+ 'org.opengis.metadata.citation.OnlineResource',
+ 'org.opengis.metadata.citation.ResponsibleParty',
+ 'org.opengis.metadata.citation.Role',
+ 'org.opengis.metadata.constraint.Restriction',
+ 'org.opengis.metadata.distribution.DigitalTransferOptions',
+ 'org.opengis.metadata.distribution.Distribution',
+ 'org.opengis.metadata.distribution.Distributor',
+ 'org.opengis.metadata.distribution.Format',
+ 'org.opengis.metadata.extent.Extent',
+ 'org.opengis.metadata.identification.Identification',
+ 'org.opengis.metadata.identification.KeywordType',
+ 'org.opengis.metadata.identification.Keywords',
+ 'org.opengis.metadata.identification.Progress',
+ 'org.opengis.metadata.identification.TopicCategory',
+ 'org.opengis.metadata.maintenance.ScopeCode',
+ 'org.opengis.util.InternationalString',
+
+ // Missing openxml schema classes are explained by the fact we use the smaller jar:
+ // "The full jar of all of the schemas is ooxml-schemas-xx.jar, and it is currently around 15mb.
+ // The smaller poi-ooxml-schemas jar is only about 4mb.
+ // This latter jar file only contains the typically used parts though."
+ // http://poi.apache.org/faq.html#faq-N10025
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAxisUnit',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBarChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTChartLines',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbls',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDateAx',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExternalData',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTGrouping',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblAlgn',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLegendEntry',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMarkerSize',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRadarChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRelId',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSerAx',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1SerList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieChartImpl$1SerList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Bar3DChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BarChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BubbleChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1CatAxList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DateAxList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DoughnutChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Line3DChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1LineChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1OfPieChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Pie3DChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1PieChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1RadarChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ScatterChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SerAxList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1StockChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1SerList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList',
+ 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectionSite',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTDashStopList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectContainer',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGlowEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleTransform',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupFillProperties',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupLocking',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTHSLEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinBevel',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPatternFillProperties',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPolarAdjustHandle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTScene3D',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTShape3D',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTablePartStyle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBlipBullet',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletColorFollowText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletSizeFollowText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletTypefaceFollowText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTileInfoProperties',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.CTXYAdjustHandle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STBlackWhiteMode',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STGuid',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STRectAlignment',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STTextTabAlignType',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.STTileFlipMode',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GradFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GrpFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1NoFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1PattFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1SolidFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaBiLevelList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaCeilingList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaFloorList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaInvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModFixList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaReplList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BiLevelList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BlurList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrChangeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrReplList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1DuotoneList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1FillOverlayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1GraysclList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1HslList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GrpFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1NoFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1PattFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1SolidFillList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFontCollectionImpl$1FontList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGeomGuideListImpl$1GdList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGradientStopListImpl$1GsList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTLineStyleListImpl$1LnList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTOfficeArtExtensionListImpl$1ExtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DCubicBezierToImpl$1PtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1ArcToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CloseList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CubicBezToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1LnToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1MoveToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1QuadBezToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DListImpl$1PathList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList',
+
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvList', + 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList', + 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableImpl$1TrList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableStyleListImpl$1TblStyleList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextBodyImpl$1PList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTAbsoluteAnchor', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1CxnSpList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GraphicFrameList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GrpSpList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1PicList', + 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1SpList', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTEffectExtent', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosH', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosV', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapNone', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapSquare', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapThrough', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTight', + 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTopBottom', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTArray', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTCf', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTEmpty', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTNull', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTVstream', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STCy', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STError', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STVectorBaseType', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BoolList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CfList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ClsidList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CyList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1DateList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ErrorList', + 
'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1FiletimeList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I1List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I2List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I8List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpwstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R8List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui1List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui2List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui8List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1VariantList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BoolList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ClsidList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2CyList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2DateList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ErrorList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2FiletimeList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I1List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I2List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I8List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpwstrList', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R8List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui1List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui2List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui4List', + 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui8List', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTAcc', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBar', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBorderBox', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBox', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTD', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTEqArr', + 
'org.openxmlformats.schemas.officeDocument.x2006.math.CTF', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTFunc', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTGroupChr', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimLow', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimUpp', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTM', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTMathPr', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTNary', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMath', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMathPara', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTPhant', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTR', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTRad', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSPre', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSub', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSubSup', + 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSup', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTControlList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomShowList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomerData', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTEmbeddedFontList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTHeaderFooter', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTransition', + 'org.openxmlformats.schemas.presentationml.x2006.main.CTSmartTags', + 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed', + 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection', + 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex', + 'org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderSize', + 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType', + 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList', + 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList', + 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList', + 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1GraphicFrameList', + 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1PicList', + 'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCacheHierarchies', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedItems', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedMembers', + 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellStyles', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellWatches', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartFormats', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetPr', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetProtection', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetViews', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColHierarchiesUsage', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColItems', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColors', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConditionalFormats', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConsolidation', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTControls', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCsPageSetup', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomChartsheetViews', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomProperties', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomSheetViews', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomWorkbookViews', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataBinding', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataConsolidate', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDateTime', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDdeLink', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDimensions', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTError', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExtensionList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExternalSheetDataSet', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFieldGroup', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileRecoveryPr', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileSharing', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileVersion', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFilterColumn', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTIgnoredErrors', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTNumber', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleObjects', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPhoneticRun', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTProtectedRanges', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRecord', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowHierarchiesUsage', + 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowItems', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTScenarios', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSheetBackgroundPicture', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagPr', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagTypes', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTags', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyleInfo', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyles', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishObjects', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishing', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTX', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STCellSpans', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDataValidationImeMode', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STFieldSortType', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STGuid', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STObjects', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticAlignment', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticType', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPrintError', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STRefMode', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STSheetViewType', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STShowDataAs', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTableType', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTimePeriod', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTotalsRowFunction', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STUpdateLinks', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STVisibility', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$1AuthorList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$2AuthorList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAutoFilterImpl$1FilterColumnList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBookViewsImpl$1WorkbookViewList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCalcChainImpl$1CList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$2FormulaList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList', + 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColsImpl$1ColList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataValidationsImpl$1DataValidationList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDxfsImpl$1DxfList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalDefinedNamesImpl$1DefinedNameList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalReferencesImpl$1ExternalReferenceList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalSheetNamesImpl$1SheetNameList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFillsImpl$1FillList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1BList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CharsetList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ColorList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CondenseList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ExtendList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1FamilyList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1IList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1NameList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1OutlineList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SchemeList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ShadowList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1StrikeList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SzList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1UList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1VertAlignList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontsImpl$1FontList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTHyperlinksImpl$1HyperlinkList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTIconSetImpl$1CfvoList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTItemsImpl$1ItemList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1MapList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1SchemaList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMergeCellsImpl$1MergeCellList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTNumFmtsImpl$1NumFmtList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageBreakImpl$1BrkList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageFieldsImpl$1PageFieldList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCacheRecordsImpl$1RList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCachesImpl$1PivotCacheList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotFieldsImpl$1PivotFieldList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1BList', + 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CharsetList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ColorList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CondenseList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ExtendList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1FamilyList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1IList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1OutlineList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1RFontList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SchemeList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ShadowList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1StrikeList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SzList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1UList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1VertAlignList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RPhList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetDataImpl$1RowList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSingleXmlCellsImpl$1SingleXmlCellList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSstImpl$1SiList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTableColumnsImpl$1TableColumnList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTablePartsImpl$1TablePartList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorkbookImpl$1FileRecoveryPrList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ColsList', + 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBackground', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCnf', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColorSchemeMapping', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColumns', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCompat', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTControl', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlBlock', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlCell', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRow', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRun', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDataBinding', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocGrid', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocRsids', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocType', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocVars', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEm', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFStatusText', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFTextInput', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFitText', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTHighlight', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLock', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLongHexNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvlLegacy', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMacroName', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMailMerge', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMultiLevelType', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTNumPicBullet', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageBorders', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageMar', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageSz', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPaperSource', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTParaRPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPerm', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPermStart', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPlaceholder', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTProof', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTReadingModeInkLockDown', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRuby', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSaveThroughXslt', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtComboBox', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDate', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDropDownList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtRow', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtText', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSignedTwipsMeasure', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblLayoutType', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrExChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblStylePr', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcMar', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextScale', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangesView', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTwipsMeasure', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTView', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWriteProtection', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHeightRule', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabRelativeTo', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STThemeColor', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeEndList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTEndnotesImpl$1EndnoteList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EntryMacroList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1ExitMacroList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1HelpTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFootnotesImpl$1FootnoteList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeEndList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTLatentStylesImpl$1LsdExceptionList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumImpl$1LvlOverrideList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1AbstractNumList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1AnnotationRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1BrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CommentReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ContinuationSeparatorList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1NoBreakHyphenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ObjectList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PgNumList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PictList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PtabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1RubyList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SeparatorList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SoftHyphenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SymList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1TcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1AccList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BarList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BorderBoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1EqArrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FuncList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1GroupChrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimLowList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimUppList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeEndList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1NaryList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PhantList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1R2List', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RadList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SPreList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubSupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1DelList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1TcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtEndPrImpl$1RPrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1AliasList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1BibliographyList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1CitationList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ComboBoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DataBindingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DateList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartListList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartObjList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DropDownListList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1EquationList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1GroupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1IdList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1LockList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PictureList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PlaceholderList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RPrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RichTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ShowingPlcHdrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TemporaryList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1FooterReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1HeaderReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1ActiveWritingStyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1AttachedSchemaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1SmartTagTypeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagPrImpl$1AttrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeStartList', 
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStyleImpl$1TblStylePrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStylesImpl$1StyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTabsImpl$1TabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblGridBaseImpl$1GridColList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeEndList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1TrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CantSplitList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CnfStyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1DivIdList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridAfterList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridBeforeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1HiddenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1JcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblCellSpacingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblHeaderList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TrHeightList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WAfterList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WBeforeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1TblList', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.ServiceReference', + 'org.osgi.framework.ServiceRegistration', + 'org.osgi.util.tracker.ServiceTracker', + 'org.osgi.util.tracker.ServiceTrackerCustomizer', + 'org.sqlite.SQLiteConfig', + 'org.tukaani.xz.ARMOptions', + 'org.tukaani.xz.ARMThumbOptions', + 'org.tukaani.xz.DeltaOptions', + 'org.tukaani.xz.FilterOptions', + 'org.tukaani.xz.FinishableWrapperOutputStream', + 'org.tukaani.xz.IA64Options', + 'org.tukaani.xz.LZMA2InputStream', + 'org.tukaani.xz.LZMA2Options', + 'org.tukaani.xz.LZMAInputStream', + 'org.tukaani.xz.PowerPCOptions', + 'org.tukaani.xz.SPARCOptions', + 'org.tukaani.xz.SingleXZInputStream', + 'org.tukaani.xz.UnsupportedOptionsException', + 'org.tukaani.xz.X86Options', + 'org.tukaani.xz.XZ', + 'org.tukaani.xz.XZInputStream', + 'org.tukaani.xz.XZOutputStream', + 'org.w3.x2000.x09.xmldsig.KeyInfoType', + 'org.w3.x2000.x09.xmldsig.SignatureMethodType', + 'org.w3.x2000.x09.xmldsig.SignatureValueType', + 'org.w3.x2000.x09.xmldsig.TransformsType', + 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList', + 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', + 
'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList', + 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList', + 'schemasMicrosoftComOfficeExcel.STCF', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1Accel2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AccelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AnchorList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoFillList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoScaleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CFList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CameraList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CancelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CheckedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColoredList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColumnList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DDEList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultSizeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DisabledList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DismissList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropLinesList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropStyleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FirstButtonList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaGroupList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaLinkList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaMacroList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaRangeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaTxbxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HelpList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HorizList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1IncList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1JustLastXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LCTList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ListItemList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MapOCXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MaxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MoveWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiSelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeD2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeDList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PrintObjectList', + 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RecalcAlwaysList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptExtendedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLanguageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLocationList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SecretEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelTypeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SizeWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextHAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextVAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1UIObjList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VScrollList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VTEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValidIdsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VisibleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1WidthMinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2Accel2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AccelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AnchorList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoFillList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoScaleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CFList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CameraList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CancelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CheckedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColoredList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColumnList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DDEList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultSizeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DisabledList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DismissList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropLinesList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropStyleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FirstButtonList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaGroupList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaLinkList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaMacroList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaRangeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaTxbxList', + 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HelpList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HorizList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2IncList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2JustLastXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LCTList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ListItemList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MapOCXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MaxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MoveWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiSelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeD2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeDList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PrintObjectList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RecalcAlwaysList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptExtendedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLanguageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLocationList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SecretEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelTypeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SizeWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextHAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextVAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2UIObjList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VScrollList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VTEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValidIdsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VisibleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2WidthMinList', + 'schemasMicrosoftComOfficeOffice.CTCallout', + 'schemasMicrosoftComOfficeOffice.CTClipPath', + 'schemasMicrosoftComOfficeOffice.CTComplex', + 'schemasMicrosoftComOfficeOffice.CTDiagram', + 'schemasMicrosoftComOfficeOffice.CTExtrusion', + 'schemasMicrosoftComOfficeOffice.CTFill', + 'schemasMicrosoftComOfficeOffice.CTInk', + 'schemasMicrosoftComOfficeOffice.CTRegroupTable', + 'schemasMicrosoftComOfficeOffice.CTRules', + 'schemasMicrosoftComOfficeOffice.CTSignatureLine', + 'schemasMicrosoftComOfficeOffice.CTSkew', + 'schemasMicrosoftComOfficeOffice.CTStrokeChild', + 'schemasMicrosoftComOfficeOffice.STBWMode', + 'schemasMicrosoftComOfficeOffice.STConnectorType', + 'schemasMicrosoftComOfficeOffice.STHrAlign', + 'schemasMicrosoftComOfficeOffice.STRelationshipId', + 'schemasMicrosoftComOfficeOffice.STTrueFalse', + 
'schemasMicrosoftComOfficeOffice.STTrueFalseBlank', + 'schemasMicrosoftComOfficePowerpoint.CTEmpty', + 'schemasMicrosoftComOfficePowerpoint.CTRel', + 'schemasMicrosoftComOfficeWord.CTAnchorLock', + 'schemasMicrosoftComOfficeWord.CTBorder', + 'schemasMicrosoftComOfficeWord.CTWrap', + 'schemasMicrosoftComVml.CTArc', + 'schemasMicrosoftComVml.CTCurve', + 'schemasMicrosoftComVml.CTImage', + 'schemasMicrosoftComVml.CTImageData', + 'schemasMicrosoftComVml.CTLine', + 'schemasMicrosoftComVml.CTOval', + 'schemasMicrosoftComVml.CTPolyLine', + 'schemasMicrosoftComVml.CTRect', + 'schemasMicrosoftComVml.CTRoundRect', + 'schemasMicrosoftComVml.STEditAs', + 'schemasMicrosoftComVml.STFillMethod', + 'schemasMicrosoftComVml.STFillType', + 'schemasMicrosoftComVml.STImageAspect', + 'schemasMicrosoftComVml.STShadowType', + 'schemasMicrosoftComVml.STStrokeArrowLength', + 'schemasMicrosoftComVml.STStrokeArrowType', + 'schemasMicrosoftComVml.STStrokeArrowWidth', + 'schemasMicrosoftComVml.STStrokeEndCap', + 'schemasMicrosoftComVml.STStrokeLineStyle', + 'schemasMicrosoftComVml.STTrueFalseBlank', + 'schemasMicrosoftComVml.impl.CTFormulasImpl$1FList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ArcList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BordertopList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClientDataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1CurveList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1DiagramList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1GroupList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImageList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1LineList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1OvalList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1PolylineList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1RectList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1RoundrectList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapetypeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1SignaturelineList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1WrapList', + 'schemasMicrosoftComVml.impl.CTHandlesImpl$1HList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BordertopList', + 
'schemasMicrosoftComVml.impl.CTShapeImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1InkList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1IscommentList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1SignaturelineList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1WrapList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BordertopList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClientDataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SignaturelineList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1WrapList', + 'ucar.ma2.DataType', + 'ucar.nc2.Attribute', + 'ucar.nc2.Dimension', + 'ucar.nc2.Group', + 'ucar.nc2.NetcdfFile', + 'ucar.nc2.Variable', + 'ucar.nc2.dataset.NetcdfDataset', ] diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index eb3004af2ad..68ab6f56ddb 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -17,8 +17,6 @@ * under the License. */ -//apply plugin: 'nebula.provided-base' - import org.apache.tools.ant.taskdefs.condition.Os import java.nio.file.Files import java.nio.file.Path @@ -100,34 +98,243 @@ integTest { compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // note: the jersey ones may be bogus, see my bug report at forbidden-apis! 
- // internal java api: com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable - // internal java api: com.sun.jersey.api.core.HttpContext - // internal java api: com.sun.jersey.core.spi.component.ComponentScope - // internal java api: com.sun.jersey.spi.inject.Injectable - // internal java api: com.sun.jersey.core.spi.component.ComponentContext - 'org.apache.hadoop.hdfs.web.resources.UserProvider', - - // internal java api: com.sun.jersey.spi.container.ResourceFilters - 'org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods', - // internal java api: com.sun.jersey.spi.container.servlet.ServletContainer - 'org.apache.hadoop.http.HttpServer', - 'org.apache.hadoop.http.HttpServer2', - - // internal java api: com.sun.jersey.api.ParamException - 'org.apache.hadoop.hdfs.web.resources.ExceptionHandler', - 'org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ExceptionHandler', - 'org.apache.hadoop.hdfs.web.ParamFilter', - - // internal java api: com.sun.jersey.spi.container.ContainerRequestFilter - // internal java api: com.sun.jersey.spi.container.ContainerRequest - 'org.apache.hadoop.hdfs.web.ParamFilter', - 'org.apache.hadoop.hdfs.web.ParamFilter$1', - - // internal java api: com.sun.jndi.ldap.LdapCtxFactory - 'org.apache.hadoop.security.LdapGroupsMapping', + // classes are missing, because we added hadoop jars one by one until tests pass. + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonWriter', + 'com.jcraft.jsch.ChannelExec', + 'com.jcraft.jsch.JSch', + 'com.jcraft.jsch.Logger', + 'com.jcraft.jsch.Session', + 'com.sun.jersey.api.ParamException', + 'com.sun.jersey.api.core.HttpContext', + 'com.sun.jersey.core.spi.component.ComponentContext', + 'com.sun.jersey.core.spi.component.ComponentScope', + 'com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable', + 'com.sun.jersey.spi.container.ContainerRequest', + 'com.sun.jersey.spi.container.ContainerRequestFilter', + 'com.sun.jersey.spi.container.ContainerResponseFilter', + 'com.sun.jersey.spi.container.ResourceFilter', + 'com.sun.jersey.spi.container.servlet.ServletContainer', + 'com.sun.jersey.spi.inject.Injectable', + 'com.sun.jersey.spi.inject.InjectableProvider', + 'io.netty.bootstrap.Bootstrap', + 'io.netty.bootstrap.ChannelFactory', + 'io.netty.bootstrap.ServerBootstrap', + 'io.netty.buffer.ByteBuf', + 'io.netty.buffer.Unpooled', + 'io.netty.channel.Channel', + 'io.netty.channel.ChannelFuture', + 'io.netty.channel.ChannelFutureListener', + 'io.netty.channel.ChannelHandler', + 'io.netty.channel.ChannelHandlerContext', + 'io.netty.channel.ChannelInboundHandlerAdapter', + 'io.netty.channel.ChannelInitializer', + 'io.netty.channel.ChannelPipeline', + 'io.netty.channel.EventLoopGroup', + 'io.netty.channel.SimpleChannelInboundHandler', + 'io.netty.channel.group.ChannelGroup', + 'io.netty.channel.group.ChannelGroupFuture', + 'io.netty.channel.group.DefaultChannelGroup', + 'io.netty.channel.nio.NioEventLoopGroup', + 'io.netty.channel.socket.SocketChannel', + 'io.netty.channel.socket.nio.NioServerSocketChannel', + 'io.netty.channel.socket.nio.NioSocketChannel', + 'io.netty.handler.codec.http.DefaultFullHttpRequest', + 'io.netty.handler.codec.http.DefaultFullHttpResponse', + 'io.netty.handler.codec.http.DefaultHttpResponse', + 'io.netty.handler.codec.http.HttpContent', + 'io.netty.handler.codec.http.HttpHeaders', + 'io.netty.handler.codec.http.HttpMethod', + 'io.netty.handler.codec.http.HttpRequest', + 'io.netty.handler.codec.http.HttpRequestDecoder', + 
'io.netty.handler.codec.http.HttpRequestEncoder', + 'io.netty.handler.codec.http.HttpResponseEncoder', + 'io.netty.handler.codec.http.HttpResponseStatus', + 'io.netty.handler.codec.http.HttpVersion', + 'io.netty.handler.codec.http.QueryStringDecoder', + 'io.netty.handler.codec.string.StringEncoder', + 'io.netty.handler.ssl.SslHandler', + 'io.netty.handler.stream.ChunkedStream', + 'io.netty.handler.stream.ChunkedWriteHandler', + 'io.netty.util.concurrent.GlobalEventExecutor', + 'javax.ws.rs.core.Context', + 'javax.ws.rs.core.MediaType', + 'javax.ws.rs.core.MultivaluedMap', + 'javax.ws.rs.core.Response$ResponseBuilder', + 'javax.ws.rs.core.Response$Status', + 'javax.ws.rs.core.Response', + 'javax.ws.rs.core.StreamingOutput', + 'javax.ws.rs.core.UriBuilder', + 'javax.ws.rs.ext.ExceptionMapper', + 'jdiff.JDiff', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.avro.Schema', + 'org.apache.avro.file.DataFileReader', + 'org.apache.avro.file.FileReader', + 'org.apache.avro.file.SeekableInput', + 'org.apache.avro.generic.GenericDatumReader', + 'org.apache.avro.generic.GenericDatumWriter', + 'org.apache.avro.io.BinaryDecoder', + 'org.apache.avro.io.BinaryEncoder', + 'org.apache.avro.io.DatumReader', + 'org.apache.avro.io.DatumWriter', + 'org.apache.avro.io.DecoderFactory', + 'org.apache.avro.io.EncoderFactory', + 'org.apache.avro.io.JsonEncoder', + 'org.apache.avro.reflect.ReflectData', + 'org.apache.avro.reflect.ReflectDatumReader', + 'org.apache.avro.reflect.ReflectDatumWriter', + 'org.apache.avro.specific.SpecificDatumReader', + 'org.apache.avro.specific.SpecificDatumWriter', + 'org.apache.avro.specific.SpecificRecord', + 'org.apache.commons.beanutils.BeanUtils', + 'org.apache.commons.beanutils.DynaBean', + 'org.apache.commons.beanutils.DynaClass', + 'org.apache.commons.beanutils.DynaProperty', + 'org.apache.commons.beanutils.PropertyUtils', + 'org.apache.commons.compress.archivers.tar.TarArchiveEntry', + 'org.apache.commons.compress.archivers.tar.TarArchiveInputStream', + 'org.apache.commons.codec.DecoderException', + 'org.apache.commons.codec.binary.Base64', + 'org.apache.commons.codec.binary.Hex', + 'org.apache.commons.codec.digest.DigestUtils', + 'org.apache.commons.daemon.Daemon', + 'org.apache.commons.daemon.DaemonContext', + 'org.apache.commons.digester.AbstractObjectCreationFactory', + 'org.apache.commons.digester.CallMethodRule', + 'org.apache.commons.digester.Digester', + 'org.apache.commons.digester.ObjectCreationFactory', + 'org.apache.commons.digester.substitution.MultiVariableExpander', + 'org.apache.commons.digester.substitution.VariableSubstitutor', + 'org.apache.commons.digester.xmlrules.DigesterLoader', + 'org.apache.commons.httpclient.util.URIUtil', + 'org.apache.commons.jxpath.JXPathContext', + 'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl', + 'org.apache.commons.jxpath.ri.QName', + 'org.apache.commons.jxpath.ri.compiler.NodeNameTest', + 'org.apache.commons.jxpath.ri.compiler.NodeTest', + 'org.apache.commons.jxpath.ri.compiler.NodeTypeTest', + 'org.apache.commons.jxpath.ri.model.NodeIterator', + 'org.apache.commons.jxpath.ri.model.NodePointer', + 'org.apache.commons.jxpath.ri.model.NodePointerFactory', + 'org.apache.commons.math3.util.ArithmeticUtils', + 'org.apache.commons.net.ftp.FTPClient', + 'org.apache.commons.net.ftp.FTPFile', + 'org.apache.commons.net.ftp.FTPReply', + 'org.apache.commons.net.util.SubnetUtils$SubnetInfo', + 'org.apache.commons.net.util.SubnetUtils', + 'org.apache.curator.ensemble.fixed.FixedEnsembleProvider', + 
'org.apache.curator.framework.CuratorFramework', + 'org.apache.curator.framework.CuratorFrameworkFactory$Builder', + 'org.apache.curator.framework.CuratorFrameworkFactory', + 'org.apache.curator.framework.api.ACLBackgroundPathAndBytesable', + 'org.apache.curator.framework.api.ACLProvider', + 'org.apache.curator.framework.api.BackgroundPathAndBytesable', + 'org.apache.curator.framework.api.ChildrenDeletable', + 'org.apache.curator.framework.api.CreateBuilder', + 'org.apache.curator.framework.api.DeleteBuilder', + 'org.apache.curator.framework.api.ExistsBuilder', + 'org.apache.curator.framework.api.GetChildrenBuilder', + 'org.apache.curator.framework.api.GetDataBuilder', + 'org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable', + 'org.apache.curator.framework.api.SetDataBuilder', + 'org.apache.curator.framework.api.WatchPathable', + 'org.apache.curator.framework.imps.DefaultACLProvider', + 'org.apache.curator.framework.listen.ListenerContainer', + 'org.apache.curator.framework.recipes.cache.ChildData', + 'org.apache.curator.framework.recipes.cache.PathChildrenCache$StartMode', + 'org.apache.curator.framework.recipes.cache.PathChildrenCache', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent$Type', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheListener', + 'org.apache.curator.framework.recipes.locks.Reaper$Mode', + 'org.apache.curator.framework.recipes.locks.Reaper', + 'org.apache.curator.framework.recipes.shared.SharedCount', + 'org.apache.curator.framework.recipes.shared.VersionedValue', + 'org.apache.curator.retry.ExponentialBackoffRetry', + 'org.apache.curator.retry.RetryNTimes', + 'org.apache.curator.utils.CloseableScheduledExecutorService', + 'org.apache.curator.utils.CloseableUtils', + 'org.apache.curator.utils.EnsurePath', + 'org.apache.curator.utils.PathUtils', + 'org.apache.curator.utils.ThreadUtils', + 'org.apache.curator.utils.ZKPaths', + 'org.apache.directory.server.kerberos.shared.keytab.Keytab', + 'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry', + 'org.apache.http.NameValuePair', + 'org.apache.http.client.utils.URIBuilder', + 'org.apache.http.client.utils.URLEncodedUtils', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.DirectoryScanner', + 'org.apache.tools.ant.Task', + 'org.apache.tools.ant.taskdefs.Execute', + 'org.apache.tools.ant.types.FileSet', + 'org.apache.xml.serialize.OutputFormat', + 'org.apache.xml.serialize.XMLSerializer', + 'org.apache.zookeeper.AsyncCallback$StatCallback', + 'org.apache.zookeeper.AsyncCallback$StringCallback', + 'org.apache.zookeeper.CreateMode', + 'org.apache.zookeeper.KeeperException$Code', + 'org.apache.zookeeper.KeeperException', + 'org.apache.zookeeper.WatchedEvent', + 'org.apache.zookeeper.Watcher$Event$EventType', + 'org.apache.zookeeper.Watcher$Event$KeeperState', + 'org.apache.zookeeper.Watcher', + 'org.apache.zookeeper.ZKUtil', + 'org.apache.zookeeper.ZooDefs$Ids', + 'org.apache.zookeeper.ZooKeeper', + 'org.apache.zookeeper.data.ACL', + 'org.apache.zookeeper.data.Id', + 'org.apache.zookeeper.data.Stat', + 'org.codehaus.jackson.JsonEncoding', + 'org.codehaus.jackson.JsonFactory', + 'org.codehaus.jackson.JsonGenerator', + 'org.codehaus.jackson.JsonGenerator$Feature', + 'org.codehaus.jackson.JsonNode', + 'org.codehaus.jackson.map.MappingJsonFactory', + 'org.codehaus.jackson.map.ObjectMapper', + 
'org.codehaus.jackson.map.ObjectReader', + 'org.codehaus.jackson.map.ObjectWriter', + 'org.codehaus.jackson.node.ContainerNode', + 'org.codehaus.jackson.type.TypeReference', + 'org.codehaus.jackson.util.MinimalPrettyPrinter', + 'org.fusesource.leveldbjni.JniDBFactory', + 'org.iq80.leveldb.DB', + 'org.iq80.leveldb.Options', + 'org.iq80.leveldb.WriteBatch', + 'org.mortbay.jetty.Connector', + 'org.mortbay.jetty.Handler', + 'org.mortbay.jetty.InclusiveByteRange', + 'org.mortbay.jetty.MimeTypes', + 'org.mortbay.jetty.NCSARequestLog', + 'org.mortbay.jetty.RequestLog', + 'org.mortbay.jetty.Server', + 'org.mortbay.jetty.handler.ContextHandler$SContext', + 'org.mortbay.jetty.handler.ContextHandler', + 'org.mortbay.jetty.handler.ContextHandlerCollection', + 'org.mortbay.jetty.handler.HandlerCollection', + 'org.mortbay.jetty.handler.RequestLogHandler', + 'org.mortbay.jetty.nio.SelectChannelConnector', + 'org.mortbay.jetty.security.SslSocketConnector', + 'org.mortbay.jetty.servlet.AbstractSessionManager', + 'org.mortbay.jetty.servlet.Context', + 'org.mortbay.jetty.servlet.DefaultServlet', + 'org.mortbay.jetty.servlet.FilterHolder', + 'org.mortbay.jetty.servlet.FilterMapping', + 'org.mortbay.jetty.servlet.ServletHandler', + 'org.mortbay.jetty.servlet.ServletHolder', + 'org.mortbay.jetty.servlet.SessionHandler', + 'org.mortbay.jetty.webapp.WebAppContext', + 'org.mortbay.log.Log', + 'org.mortbay.thread.QueuedThreadPool', + 'org.mortbay.util.MultiException', + 'org.mortbay.util.ajax.JSON$Convertible', + 'org.mortbay.util.ajax.JSON$Output', + 'org.mortbay.util.ajax.JSON', + 'org.znerd.xmlenc.XMLOutputter', // internal java api: sun.net.dns.ResolverConfiguration // internal java api: sun.net.util.IPAddressUtil diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 90e4dd2d956..b11aa732b25 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -50,11 +50,16 @@ test { systemProperty 'tests.artifact', project.name } -// classes are missing, e.g. 
org.apache.log.Logger -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext - 'com.amazonaws.util.XpathUtils', + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', + + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', ] diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 3782f368af4..53406f1aad9 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -35,13 +35,14 @@ test { systemProperty 'tests.security.manager', 'false' } -// classes are missing, com.ibm.icu.lang.UCharacter -thirdPartyAudit.missingClasses = true thirdPartyAudit.excludes = [ - // uses internal java api: sun.misc.Unsafe - 'com.google.common.cache.Striped64', - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + + // missing class + 'com.ibm.icu.lang.UCharacter', ] diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 1c44cca344c..5039036be46 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -17,6 +17,8 @@ * under the License. */ +import org.elasticsearch.gradle.precommit.PrecommitTasks; + dependencies { compile "org.elasticsearch:elasticsearch:${version}" compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" @@ -33,5 +35,29 @@ dependencies { compileJava.options.compilerArgs << '-Xlint:-cast,-deprecation,-fallthrough,-overrides,-rawtypes,-serial,-try,-unchecked' compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' -// we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!! -thirdPartyAudit.missingClasses = true +// the main files are actually test files, so use the appropriate forbidden api sigs +forbiddenApisMain { + bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] + signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), + PrecommitTasks.getResource('/forbidden/test-signatures.txt')] +} + +// TODO: should we have licenses for our test deps?
+dependencyLicenses.enabled = false + +thirdPartyAudit.excludes = [ + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + // we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!! + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.DirectoryScanner', + 'org.apache.tools.ant.Task', + 'org.apache.tools.ant.types.FileSet', + 'org.easymock.EasyMock', + 'org.easymock.IArgumentMatcher', + 'org.jmock.core.Constraint', +] From 3b06cd8d92ea30b2375db06701434b7346621eb2 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 29 Dec 2015 09:17:47 +0100 Subject: [PATCH 267/322] Fix phrase suggest collate example to use correct script context/syntax Closes #15675 --- docs/reference/search/suggesters/phrase-suggest.asciidoc | 6 ++++-- .../elasticsearch/messy/tests/SuggestSearchTests.java | 9 +++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index bc2f016d288..f88d486fce4 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -192,8 +192,10 @@ curl -XPOST 'localhost:9200/_search' -d { } ], "collate": { "query": { <1> - "match": { - "{{field_name}}" : "{{suggestion}}" <2> + "inline" : { + "match": { + "{{field_name}}" : "{{suggestion}}" <2> + } } }, "params": {"field_name" : "title"}, <3> diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index a0699a35534..b0f2f576249 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -82,12 +82,12 @@ import static org.hamcrest.Matchers.nullValue; * request, modify again, request again, etc. This makes it very obvious what changes between requests.
*/ public class SuggestSearchTests extends ESIntegTestCase { - + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(MustachePlugin.class); } - + // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { createIndex("test"); @@ -618,7 +618,7 @@ public class SuggestSearchTests extends ESIntegTestCase { // Check the name this time because we're repeating it which is funky assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel")); } - + private List<String> readMarvelHeroNames() throws IOException, URISyntaxException { return Files.readAllLines(PathUtils.get(Suggest.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8); } @@ -1166,11 +1166,12 @@ public class SuggestSearchTests extends ESIntegTestCase { String filterString = XContentFactory.jsonBuilder() .startObject() .startObject("match_phrase") - .field("title", "{{suggestion}}") + .field("{{field}}", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); + filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); From 60cbb2d7bcadcd04fbd6cfb2c20bb3f95c49dda1 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 29 Dec 2015 10:38:58 +0100 Subject: [PATCH 268/322] [TEST] Protect UpgradeIT from using too many replicas --- .../action/admin/indices/upgrade/UpgradeIT.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java index c27f4c85a96..9d8002210e7 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java @@ -61,6 +61,11 @@ public class UpgradeIT extends ESBackcompatTestCase { return 2; } + @Override + protected int maximumNumberOfReplicas() { + return Math.max(0, Math.min(backwardsCluster().numBackwardsDataNodes(), backwardsCluster().numNewDataNodes()) - 1); + } + public void testUpgrade() throws Exception { // allow the cluster to rebalance quickly - 2 concurrent rebalance are default we can do higher Settings.Builder builder = Settings.builder(); From a49fe189b0f74b28bd1c9727dda8258ceec912f8 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Tue, 1 Dec 2015 11:35:56 +0100 Subject: [PATCH 269/322] Support global `repositories.azure.` settings All those repository settings can also be defined globally in the `elasticsearch.yml` file using the prefix `repositories.azure.`. For example: ```yml repositories.azure: container: backup-container base_path: backups chunk_size: 32m compress: true ``` Closes #13776.
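For illustration, a minimal sketch of the resulting precedence; the repository name, the container value, and the surrounding `client` instance are assumptions for the example, not part of this change:

```java
// Hedged sketch: with `repositories.azure.container: backup-container` set globally in
// elasticsearch.yml, a repository registered without an explicit "container" setting falls
// back to the global value; an explicit repository setting, as below, wins for that
// repository only. Assumes an existing org.elasticsearch.client.Client named `client`.
client.admin().cluster().preparePutRepository("my_backup")
        .setType("azure")
        .setSettings(Settings.builder()
                .put("container", "my-other-container") // hypothetical name; overrides repositories.azure.container
                .build())
        .get();
```

Repository-level values take precedence over the `repositories.azure.*` keys, which in turn fall back to the built-in defaults.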
--- docs/plugins/repository-azure.asciidoc | 18 +++++++++++ .../cloud/azure/blobstore/AzureBlobStore.java | 11 ++++--- .../azure/storage/AzureStorageService.java | 7 ++-- .../azure/storage/AzureStorageSettings.java | 32 +++++++++++++++++-- .../storage/AzureStorageSettingsFilter.java | 5 ++- .../repositories/azure/AzureRepository.java | 25 +++++++++------ ...bstractAzureRepositoryServiceTestCase.java | 4 +-- .../AzureStorageSettingsFilterTest.java | 7 ++-- .../azure/AzureSettingsParserTest.java | 4 +-- 9 files changed, 85 insertions(+), 28 deletions(-) diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 9846b5fbf58..7fc3c54d007 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -64,6 +64,8 @@ cloud: `my_account1` is the default account which will be used by a repository unless you set an explicit one. +[[repository-azure-repository-settings]] +===== Repository settings The Azure repository supports following settings: @@ -155,6 +157,22 @@ client.admin().cluster().preparePutRepository("my_backup_java1") ).get(); ---- +[[repository-azure-global-settings]] +===== Global repositories settings + +All those repository settings can also be defined globally in the `elasticsearch.yml` file using the prefix +`repositories.azure.`. For example: + +[source,yaml] +---- +repositories.azure: + container: backup-container + base_path: backups + chunk_size: 32m + compress: true +---- + + [[repository-azure-validation]] ===== Repository validation rules diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 99a505c5666..cf97008249f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.azure.blobstore; import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.storage.AzureStorageService; +import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -31,6 +32,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; +import org.elasticsearch.repositories.azure.AzureRepository.Defaults; import java.io.InputStream; import java.io.OutputStream; @@ -38,8 +40,7 @@ import java.net.URISyntaxException; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage.CONTAINER; -import static org.elasticsearch.repositories.azure.AzureRepository.CONTAINER_DEFAULT; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettings; import static org.elasticsearch.repositories.azure.AzureRepository.Repository; public class AzureBlobStore extends AbstractComponent implements BlobStore { @@ -56,13 +57,13 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { AzureStorageService client) throws URISyntaxException, StorageException {
super(settings); this.client = client.start(); - this.container = repositorySettings.settings().get("container", settings.get(CONTAINER, CONTAINER_DEFAULT)); + this.container = getRepositorySettings(repositorySettings, Repository.CONTAINER, Storage.CONTAINER, Defaults.CONTAINER); this.repositoryName = name.getName(); // NOTE: null account means to use the first one specified in config - this.accountName = repositorySettings.settings().get(Repository.ACCOUNT, null); + this.accountName = getRepositorySettings(repositorySettings, Repository.ACCOUNT, Storage.ACCOUNT, null); - String modeStr = repositorySettings.settings().get(Repository.LOCATION_MODE, null); + String modeStr = getRepositorySettings(repositorySettings, Repository.LOCATION_MODE, Storage.LOCATION_MODE, null); if (modeStr == null) { this.locMode = LocationMode.PRIMARY_ONLY; } else { diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 8c20bb6d873..e225f420d0e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -37,9 +37,12 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; @Deprecated - public static final String ACCOUNT = "cloud.azure.storage.account"; + public static final String ACCOUNT_DEPRECATED = "cloud.azure.storage.account"; @Deprecated - public static final String KEY = "cloud.azure.storage.key"; + public static final String KEY_DEPRECATED = "cloud.azure.storage.key"; + + public static final String ACCOUNT = "repositories.azure.account"; + public static final String LOCATION_MODE = "repositories.azure.location_mode"; public static final String CONTAINER = "repositories.azure.container"; public static final String BASE_PATH = "repositories.azure.base_path"; public static final String CHUNK_SIZE = "repositories.azure.chunk_size"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 7fd0312df29..77be71a7c95 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -24,6 +24,8 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.repositories.RepositorySettings; import java.util.HashMap; import java.util.Map; @@ -73,11 +75,11 @@ public class AzureStorageSettings { Map secondaryStorage = new HashMap<>(); // We check for deprecated settings - String account = settings.get(Storage.ACCOUNT); - String key = settings.get(Storage.KEY); + String account = settings.get(Storage.ACCOUNT_DEPRECATED); + String key = settings.get(Storage.KEY_DEPRECATED); if (account != null) { logger.warn("[{}] and [{}] have been deprecated. 
Use now [{}xxx.account] and [{}xxx.key] where xxx is any name", - Storage.ACCOUNT, Storage.KEY, Storage.PREFIX, Storage.PREFIX); + Storage.ACCOUNT_DEPRECATED, Storage.KEY_DEPRECATED, Storage.PREFIX, Storage.PREFIX); primaryStorage = new AzureStorageSettings(null, account, key); } else { Settings storageSettings = settings.getByPrefix(Storage.PREFIX); @@ -119,4 +121,28 @@ public class AzureStorageSettings { return Tuple.tuple(primaryStorage, secondaryStorage); } + + public static String getRepositorySettings(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + String defaultValue) { + return repositorySettings.settings().get(repositorySettingName, + repositorySettings.globalSettings().get(repositoriesSettingName, defaultValue)); + } + + public static ByteSizeValue getRepositorySettingsAsBytesSize(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + ByteSizeValue defaultValue) { + return repositorySettings.settings().getAsBytesSize(repositorySettingName, + repositorySettings.globalSettings().getAsBytesSize(repositoriesSettingName, defaultValue)); + } + + public static Boolean getRepositorySettingsAsBoolean(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + Boolean defaultValue) { + return repositorySettings.settings().getAsBoolean(repositorySettingName, + repositorySettings.globalSettings().getAsBoolean(repositoriesSettingName, defaultValue)); + } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java index c061d262f0b..2c4e7957af3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java @@ -19,6 +19,7 @@ package org.elasticsearch.cloud.azure.storage; +import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -30,6 +31,8 @@ public class AzureStorageSettingsFilter extends AbstractComponent { public AzureStorageSettingsFilter(Settings settings, SettingsFilter settingsFilter) { super(settings); // Cloud storage API settings needed to be hidden - settingsFilter.addFilter("cloud.azure.storage.*"); + settingsFilter.addFilter(Storage.PREFIX + "*.account"); + settingsFilter.addFilter(Storage.PREFIX + "*.key"); + settingsFilter.addFilter(Storage.ACCOUNT); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 60930a3a946..a3abf9b4adf 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -43,6 +43,10 @@ import java.net.URISyntaxException; import java.util.List; import java.util.Locale; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettings; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettingsAsBoolean; +import 
static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettingsAsBytesSize; + /** * Azure file system implementation of the BlobStoreRepository *

        @@ -57,7 +61,13 @@ import java.util.Locale; public class AzureRepository extends BlobStoreRepository { public final static String TYPE = "azure"; - public final static String CONTAINER_DEFAULT = "elasticsearch-snapshots"; + + static public final class Defaults { + public static final String CONTAINER = "elasticsearch-snapshots"; + public static final ByteSizeValue CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); + public static final Boolean COMPRESS = false; + } + static public final class Repository { public static final String ACCOUNT = "account"; @@ -83,21 +93,18 @@ public class AzureRepository extends BlobStoreRepository { AzureBlobStore azureBlobStore) throws IOException, URISyntaxException, StorageException { super(name.getName(), repositorySettings, indexShardRepository); - String container = repositorySettings.settings().get(Repository.CONTAINER, - settings.get(Storage.CONTAINER, CONTAINER_DEFAULT)); + String container = getRepositorySettings(repositorySettings, Repository.CONTAINER, Storage.CONTAINER, Defaults.CONTAINER); this.blobStore = azureBlobStore; - this.chunkSize = repositorySettings.settings().getAsBytesSize(Repository.CHUNK_SIZE, - settings.getAsBytesSize(Storage.CHUNK_SIZE, new ByteSizeValue(64, ByteSizeUnit.MB))); + this.chunkSize = getRepositorySettingsAsBytesSize(repositorySettings, Repository.CHUNK_SIZE, Storage.CHUNK_SIZE, Defaults.CHUNK_SIZE); if (this.chunkSize.getMb() > 64) { logger.warn("azure repository does not support yet size > 64mb. Fall back to 64mb."); this.chunkSize = new ByteSizeValue(64, ByteSizeUnit.MB); } - this.compress = repositorySettings.settings().getAsBoolean(Repository.COMPRESS, - settings.getAsBoolean(Storage.COMPRESS, false)); - String modeStr = repositorySettings.settings().get(Repository.LOCATION_MODE, null); + this.compress = getRepositorySettingsAsBoolean(repositorySettings, Repository.COMPRESS, Storage.COMPRESS, Defaults.COMPRESS); + String modeStr = getRepositorySettings(repositorySettings, Repository.LOCATION_MODE, Storage.LOCATION_MODE, null); if (modeStr != null) { LocationMode locationMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); if (locationMode == LocationMode.SECONDARY_ONLY) { @@ -109,7 +116,7 @@ public class AzureRepository extends BlobStoreRepository { readonly = false; } - String basePath = repositorySettings.settings().get(Repository.BASE_PATH, null); + String basePath = getRepositorySettings(repositorySettings, Repository.BASE_PATH, Storage.BASE_PATH, null); if (Strings.hasLength(basePath)) { // Remove starting / if any diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java index 05da2ccfceb..b3e878927e9 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java @@ -81,8 +81,8 @@ public abstract class AbstractAzureRepositoryServiceTestCase extends AbstractAzu .put(Storage.CONTAINER, "snapshots"); // We use sometime deprecated settings in tests - builder.put(Storage.ACCOUNT, "mock_azure_account") - .put(Storage.KEY, "mock_azure_key"); + builder.put(Storage.ACCOUNT_DEPRECATED, "mock_azure_account") + .put(Storage.KEY_DEPRECATED, "mock_azure_key"); return builder.build(); } diff --git 
a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java index bbffba492c4..eaaf9c224d8 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java @@ -29,8 +29,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.contains; public class AzureStorageSettingsFilterTest extends ESTestCase { final static Settings settings = Settings.builder() @@ -52,7 +51,7 @@ public class AzureStorageSettingsFilterTest extends ESTestCase { // Test using direct filtering Settings filteredSettings = SettingsFilter.filterSettings(settingsFilter.getPatterns(), settings); - assertThat(filteredSettings.getAsMap().keySet(), is(empty())); + assertThat(filteredSettings.getAsMap().keySet(), contains("cloud.azure.storage.azure1.default")); // Test using toXContent filtering RestRequest request = new FakeRestRequest(); @@ -63,7 +62,7 @@ public class AzureStorageSettingsFilterTest extends ESTestCase { xContentBuilder.endObject(); String filteredSettingsString = xContentBuilder.string(); filteredSettings = Settings.builder().loadFromSource(filteredSettingsString).build(); - assertThat(filteredSettings.getAsMap().keySet(), is(empty())); + assertThat(filteredSettings.getAsMap().keySet(), contains("cloud.azure.storage.azure1.default")); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java index dfb7d4517d6..59e8b8945c0 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java @@ -68,8 +68,8 @@ public class AzureSettingsParserTest extends LuceneTestCase { public void testDeprecatedSettings() { Settings settings = Settings.builder() - .put(Storage.ACCOUNT, "myaccount1") - .put(Storage.KEY, "mykey1") + .put(Storage.ACCOUNT_DEPRECATED, "myaccount1") + .put(Storage.KEY_DEPRECATED, "mykey1") .build(); Tuple<AzureStorageSettings, Map<String, AzureStorageSettings>> tuple = AzureStorageSettings.parse(settings); From 3076377fdb668711f4b47906155c1f9fb4b0233e Mon Sep 17 00:00:00 2001 From: David Pilato Date: Tue, 29 Dec 2015 11:23:28 +0100 Subject: [PATCH 270/322] Remove ICU Plugin in reference guide This documentation now lives in the plugins documentation at https://www.elastic.co/guide/en/elasticsearch/plugins/current/analysis-icu.html. We don't need a copy in the analysis reference guide.
--- docs/reference/analysis.asciidoc | 2 - docs/reference/analysis/icu-plugin.asciidoc | 246 -------------------- 2 files changed, 248 deletions(-) delete mode 100644 docs/reference/analysis/icu-plugin.asciidoc diff --git a/docs/reference/analysis.asciidoc b/docs/reference/analysis.asciidoc index 7009ca35444..8461b5c010f 100644 --- a/docs/reference/analysis.asciidoc +++ b/docs/reference/analysis.asciidoc @@ -73,5 +73,3 @@ include::analysis/tokenfilters.asciidoc[] include::analysis/charfilters.asciidoc[] -include::analysis/icu-plugin.asciidoc[] - diff --git a/docs/reference/analysis/icu-plugin.asciidoc b/docs/reference/analysis/icu-plugin.asciidoc deleted file mode 100644 index 9c979464c80..00000000000 --- a/docs/reference/analysis/icu-plugin.asciidoc +++ /dev/null @@ -1,246 +0,0 @@ -[[analysis-icu-plugin]] -== ICU Analysis Plugin - -The http://icu-project.org/[ICU] analysis plugin allows for unicode -normalization, collation and folding. The plugin is called -https://github.com/elasticsearch/elasticsearch-analysis-icu[elasticsearch-analysis-icu]. - -The plugin includes the following analysis components: - -[float] -[[icu-normalization]] -=== ICU Normalization - -Normalizes characters as explained -http://userguide.icu-project.org/transforms/normalization[here]. It -registers itself by default under `icu_normalizer` or `icuNormalizer` -using the default settings. Allows for the name parameter to be provided -which can include the following values: `nfc`, `nfkc`, and `nfkc_cf`. -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "normalization" : { - "tokenizer" : "keyword", - "filter" : ["icu_normalizer"] - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-folding]] -=== ICU Folding - -Folding of unicode characters based on `UTR#30`. It registers itself -under `icu_folding` and `icuFolding` names. -The filter also does lowercasing, which means the lowercase filter can -normally be left out. Sample setting: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "folding" : { - "tokenizer" : "keyword", - "filter" : ["icu_folding"] - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-filtering]] -==== Filtering - -The folding can be filtered by a set of unicode characters with the -parameter `unicodeSetFilter`. This is useful for a non-internationalized -search engine where retaining a set of national characters which are -primary letters in a specific language is wanted. See syntax for the -UnicodeSet -http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[here]. - -The Following example exempts Swedish characters from the folding. Note -that the filtered characters are NOT lowercased which is why we add that -filter below. - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "folding" : { - "tokenizer" : "standard", - "filter" : ["my_icu_folding", "lowercase"] - } - } - "filter" : { - "my_icu_folding" : { - "type" : "icu_folding" - "unicodeSetFilter" : "[^åäöÅÄÖ]" - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-collation]] -=== ICU Collation - -Uses collation token filter. 
Allows to either specify the rules for -collation (defined -http://www.icu-project.org/userguide/Collate_Customization.html[here]) -using the `rules` parameter (can point to a location or expressed in the -settings, location can be relative to config location), or using the -`language` parameter (further specialized by country and variant). By -default registers under `icu_collation` or `icuCollation` and uses the -default locale. - -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "filter" : ["icu_collation"] - } - } - } - } -} --------------------------------------------------- - -And here is a sample of custom collation: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "filter" : ["myCollator"] - } - }, - "filter" : { - "myCollator" : { - "type" : "icu_collation", - "language" : "en" - } - } - } - } -} --------------------------------------------------- - -[float] -==== Options - -[horizontal] -`strength`:: - The strength property determines the minimum level of difference considered significant during comparison. - The default strength for the Collator is `tertiary`, unless specified otherwise by the locale used to create the Collator. - Possible values: `primary`, `secondary`, `tertiary`, `quaternary` or `identical`. - + - See http://icu-project.org/apiref/icu4j/com/ibm/icu/text/Collator.html[ICU Collation] documentation for a more detailed - explanation for the specific values. - -`decomposition`:: - Possible values: `no` or `canonical`. Defaults to `no`. Setting this decomposition property with - `canonical` allows the Collator to handle un-normalized text properly, producing the same results as if the text were - normalized. If `no` is set, it is the user's responsibility to insure that all text is already in the appropriate form - before a comparison or before getting a CollationKey. Adjusting decomposition mode allows the user to select between - faster and more complete collation behavior. Since a great many of the world's languages do not require text - normalization, most locales set `no` as the default decomposition mode. - -[float] -==== Expert options: - -[horizontal] -`alternate`:: - Possible values: `shifted` or `non-ignorable`. Sets the alternate handling for strength `quaternary` - to be either shifted or non-ignorable. What boils down to ignoring punctuation and whitespace. - -`caseLevel`:: - Possible values: `true` or `false`. Default is `false`. Whether case level sorting is required. When - strength is set to `primary` this will ignore accent differences. - -`caseFirst`:: - Possible values: `lower` or `upper`. Useful to control which case is sorted first when case is not ignored - for strength `tertiary`. - -`numeric`:: - Possible values: `true` or `false`. Whether digits are sorted according to numeric representation. For - example the value `egg-9` is sorted before the value `egg-21`. Defaults to `false`. - -`variableTop`:: - Single character or contraction. Controls what is variable for `alternate`. - -`hiraganaQuaternaryMode`:: - Possible values: `true` or `false`. Defaults to `false`. Distinguishing between Katakana and - Hiragana characters in `quaternary` strength . 
- -[float] -=== ICU Tokenizer - -Breaks text into words according to UAX #29: Unicode Text Segmentation ((http://www.unicode.org/reports/tr29/)). - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "icu_tokenizer", - } - } - } - } -} --------------------------------------------------- - - -[float] -=== ICU Normalization CharFilter - -Normalizes characters as explained http://userguide.icu-project.org/transforms/normalization[here]. -It registers itself by default under `icu_normalizer` or `icuNormalizer` using the default settings. -Allows for the name parameter to be provided which can include the following values: `nfc`, `nfkc`, and `nfkc_cf`. -Allows for the mode parameter to be provided which can include the following values: `compose` and `decompose`. -Use `decompose` with `nfc` or `nfkc`, to get `nfd` or `nfkd`, respectively. -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "char_filter" : ["icu_normalizer"] - } - } - } - } -} --------------------------------------------------- From 96b3166c6da24c6d8ecd22e5a38c39d087cd8f18 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sat, 28 Nov 2015 12:59:09 +0100 Subject: [PATCH 271/322] Add timeout settings (default to 5 minutes) By default, Azure does not time out. This commit adds support for a timeout setting which defaults to 5 minutes. It's a timeout **per request**, not a global timeout for a snapshot request. It can be defined globally, per account or both. Defaults to `5m`. ```yml cloud: azure: storage: timeout: 10s my_account1: account: your_azure_storage_account1 key: your_azure_storage_key1 default: true my_account2: account: your_azure_storage_account2 key: your_azure_storage_key2 timeout: 30s ``` In this example, timeout will be 10s for `my_account1` and 30s for `my_account2`. Closes #14277. --- docs/plugins/repository-azure.asciidoc | 21 +++++++++++++++++ .../azure/storage/AzureStorageService.java | 2 ++ .../storage/AzureStorageServiceImpl.java | 8 +++++++ .../azure/storage/AzureStorageSettings.java | 18 ++++++++++++--- .../storage/AzureStorageServiceTest.java | 23 +++++++++++++++++++ 5 files changed, 69 insertions(+), 3 deletions(-) diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 7fc3c54d007..c93419de260 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -64,6 +64,27 @@ cloud: `my_account1` is the default account which will be used by a repository unless you set an explicit one. +You can set the timeout to use when making any single request. It can be defined globally, per account or both. +Defaults to `5m`. + +[source,yaml] +---- +cloud: + azure: + storage: + timeout: 10s + my_account1: + account: your_azure_storage_account1 + key: your_azure_storage_key1 + default: true + my_account2: + account: your_azure_storage_account2 + key: your_azure_storage_key2 + timeout: 30s +---- + +In this example, timeout will be 10s for `my_account1` and 30s for `my_account2`.
+ [[repository-azure-repository-settings]] ===== Repository settings diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index e225f420d0e..9ed909c0b8f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -41,6 +41,8 @@ public interface AzureStorageService { @Deprecated public static final String KEY_DEPRECATED = "cloud.azure.storage.key"; + public static final String TIMEOUT = "cloud.azure.storage.timeout"; + public static final String ACCOUNT = "repositories.azure.account"; public static final String LOCATION_MODE = "repositories.azure.location_mode"; public static final String CONTAINER = "repositories.azure.container"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 3159b038998..8b453867de6 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -119,6 +119,14 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent map = (Map) storage.getValue(); - AzureStorageSettings current = new AzureStorageSettings(storage.getKey(), map.get("account"), map.get("key")); + TimeValue timeout = TimeValue.parseTimeValue(map.get("timeout"), globalTimeout, Storage.PREFIX + storage.getKey() + ".timeout"); + AzureStorageSettings current = new AzureStorageSettings(storage.getKey(), map.get("account"), map.get("key"), timeout); boolean activeByDefault = Boolean.parseBoolean(map.getOrDefault("default", "false")); if (activeByDefault) { if (primaryStorage == null) { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java index fbf4abc06e5..0c195f04cf5 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java @@ -37,6 +37,7 @@ public class AzureStorageServiceTest extends ESTestCase { .put("cloud.azure.storage.azure2.key", "mykey2") .put("cloud.azure.storage.azure3.account", "myaccount3") .put("cloud.azure.storage.azure3.key", "mykey3") + .put("cloud.azure.storage.azure3.timeout", "30s") .build(); public void testGetSelectedClientWithNoPrimaryAndSecondary() { @@ -89,6 +90,28 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); } + public void testGetSelectedClientGlobalTimeout() { + Settings timeoutSettings = Settings.builder() + .put(settings) + .put("cloud.azure.storage.timeout", "10s") + .build(); + + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings); + azureStorageService.doStart(); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + 
assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(10 * 1000)); + CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); + } + + public void testGetSelectedClientDefaultTimeout() { + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); + azureStorageService.doStart(); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(5 * 60 * 1000)); + CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); + } /** * This internal class just overload createClient method which is called by AzureStorageServiceImpl.doStart() From 46a4aa970455ccbe788b81fac74967d97a766b63 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 29 Dec 2015 11:40:33 +0100 Subject: [PATCH 272/322] Tighten assertions in BulkProcessorRetryIT With this commit we check the result of a bulk request more precisely. It can either succeed, fail, or be rejected due to resource constraints. Previously, we relied on never being rejected by default. However, getting rejected is a valid condition even when retrying. With this commit we check that either we retried often enough that we don't get rejected *or*, if we did get rejected, that we maxed out the number of specified retries. --- .../action/bulk/BulkProcessorRetryIT.java | 93 ++++++++++++++++--- 1 file changed, 82 insertions(+), 11 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 3fd32361215..78e22fda5c8 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -22,13 +22,13 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matcher; -import java.util.Collections; -import java.util.Set; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -70,6 +70,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase { } private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejectedExecutionExpected) throws Throwable { + final CorrelatingBackoffPolicy internalPolicy = new CorrelatingBackoffPolicy(backoffPolicy); int numberOfAsyncOps = randomIntBetween(600, 700); final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); final Set<Object> responses = Collections.newSetFromMap(new ConcurrentHashMap<>()); @@ -85,6 +86,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase { @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + internalPolicy.logResponse(response); responses.add(response); latch.countDown(); } @@ -97,7
---
 .../action/bulk/BulkProcessorRetryIT.java     | 93 ++++++++++++++++---
 1 file changed, 82 insertions(+), 11 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
index 3fd32361215..78e22fda5c8 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
@@ -22,13 +22,13 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.Matcher;

-import java.util.Collections;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -70,6 +70,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
     }

     private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejectedExecutionExpected) throws Throwable {
+        final CorrelatingBackoffPolicy internalPolicy = new CorrelatingBackoffPolicy(backoffPolicy);
         int numberOfAsyncOps = randomIntBetween(600, 700);
         final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps);
         final Set<Object> responses = Collections.newSetFromMap(new ConcurrentHashMap<>());
@@ -85,6 +86,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {

             @Override
             public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
+                internalPolicy.logResponse(response);
                 responses.add(response);
                 latch.countDown();
             }
@@ -97,7 +99,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
         }).setBulkActions(1)
                // zero means that we're in the sync case, more means that we're in the async case
                .setConcurrentRequests(randomIntBetween(0, 100))
-               .setBackoffPolicy(backoffPolicy)
+               .setBackoffPolicy(internalPolicy)
                .build();
         indexDocs(bulkProcessor, numberOfAsyncOps);
         latch.await(10, TimeUnit.SECONDS);
@@ -115,8 +117,14 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
                 Throwable rootCause = ExceptionsHelper.unwrapCause(failure.getCause());
                 if (rootCause instanceof EsRejectedExecutionException) {
                     if (rejectedExecutionExpected == false) {
-                        // we're not expecting that we overwhelmed it even once
-                        throw new AssertionError("Unexpected failure reason", rootCause);
+                        Iterator<TimeValue> backoffState = internalPolicy.backoffStateFor(bulkResponse);
+                        assertNotNull("backoffState is null (indicates a bulk request got rejected without retry)", backoffState);
+                        if (backoffState.hasNext()) {
+                            // we're not expecting that we overwhelmed it even once when we maxed out the number of retries
+                            throw new AssertionError("Got rejected although backoff policy would allow more retries", rootCause);
+                        } else {
+                            logger.debug("We maxed out the number of bulk retries and got rejected (this is ok).");
+                        }
                     }
                 } else {
                     throw new AssertionError("Unexpected failure", rootCause);
@@ -134,12 +142,8 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {

         // validate we did not create any duplicates due to retries
         Matcher<Long> searchResultCount;
-        if (rejectedExecutionExpected) {
-            // it is ok if we lost some index operations to rejected executions
-            searchResultCount = lessThanOrEqualTo((long) numberOfAsyncOps);
-        } else {
-            searchResultCount = equalTo((long) numberOfAsyncOps);
-        }
+        // it is ok if we lost some index operations to rejected executions (which is possible even when backing off, although less likely)
+        searchResultCount = lessThanOrEqualTo((long) numberOfAsyncOps);

         SearchResponse results = client()
                 .prepareSearch(INDEX_NAME)
@@ -161,4 +165,71 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
                     .request());
         }
     }
+
+    /**
+     * Internal helper class to correlate backoff states with bulk responses. This is needed to check whether we maxed out the number
+     * of retries but still got rejected (which is perfectly fine and can also happen from time to time under heavy load).
+     *
+     * This implementation relies on an implementation detail in Retry, namely that the bulk listener is notified on the same thread
+     * as the last call to the backoff policy's iterator. The advantage is that this is non-invasive to the rest of the production code.
+     */
+    private static class CorrelatingBackoffPolicy extends BackoffPolicy {
+        private final Map<BulkResponse, Iterator<TimeValue>> correlations = new ConcurrentHashMap<>();
+        // this is intentionally *not* static final. We will only ever have one instance of this class per test case and want the
+        // thread local to be eligible for garbage collection right after the test to avoid leaks.
+        private final ThreadLocal<Iterator<TimeValue>> iterators = new ThreadLocal<>();
+
+        private final BackoffPolicy delegate;
+
+        private CorrelatingBackoffPolicy(BackoffPolicy delegate) {
+            this.delegate = delegate;
+        }
+
+        public Iterator<TimeValue> backoffStateFor(BulkResponse response) {
+            return correlations.get(response);
+        }
+
+        // Assumption: This method is called from the same thread as the last call to the internal iterator's #hasNext() / #next()
+        // see also Retry.AbstractRetryHandler#onResponse().
+        public void logResponse(BulkResponse response) {
+            Iterator<TimeValue> iterator = iterators.get();
+            // did we ever retry?
+            if (iterator != null) {
+                // we should correlate any iterator only once
+                iterators.remove();
+                correlations.put(response, iterator);
+            }
+        }
+
+        @Override
+        public Iterator<TimeValue> iterator() {
+            return new CorrelatingIterator(iterators, delegate.iterator());
+        }
+
+        private static class CorrelatingIterator implements Iterator<TimeValue> {
+            private final Iterator<TimeValue> delegate;
+            private final ThreadLocal<Iterator<TimeValue>> iterators;
+
+            private CorrelatingIterator(ThreadLocal<Iterator<TimeValue>> iterators, Iterator<TimeValue> delegate) {
+                this.iterators = iterators;
+                this.delegate = delegate;
+            }
+
+            @Override
+            public boolean hasNext() {
+                // update on every invocation as we might get rescheduled on a different thread. Unfortunately, there is a chance that
+                // we pollute the thread local map with stale values. Due to the implementation of Retry and the life cycle of the
+                // enclosing class CorrelatingBackoffPolicy this should not pose a major problem though.
+                iterators.set(this);
+                return delegate.hasNext();
+            }
+
+            @Override
+            public TimeValue next() {
+                // update on every invocation
+                iterators.set(this);
+                return delegate.next();
+            }
+        }
+    }
 }

From c813d21ffb8a1fc10e8ad50314b5b53cb40e7aaa Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer
Date: Tue, 29 Dec 2015 12:08:17 +0100
Subject: [PATCH 273/322] Replace * import with explicit imports

---
 .../org/elasticsearch/action/bulk/BulkProcessorRetryIT.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
index 78e22fda5c8..503daba8c2a 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java
@@ -28,7 +28,10 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.Matcher;

-import java.util.*;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;

From 39cec9f2ffa7a152efb507f48f06036ecdf71103 Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Tue, 29 Dec 2015 14:53:10 +0100
Subject: [PATCH 274/322] [TEST] Improve test speed

DedicatedClusterSnapshotRestoreIT#testRestoreIndexWithMissingShards took
~1.5 min to finish due to timeouts that are applied if not all shards
are allocated.
Now that the index that has unallocated shards is not refreshed, the
test is more reasonable and runs in 15 sec.

---
 .../indices/IndicesOptionsIntegrationIT.java  | 32 +++++++++----------
 .../DedicatedClusterSnapshotRestoreIT.java    |  2 +-
 .../elasticsearch/test/ESIntegTestCase.java   |  6 ++--
 3 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
index 66cb5e7ea7d..2723f49a77a 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
@@ -76,7 +76,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
         verify(segments("test1", "test2"), true);
         verify(stats("test1", "test2"), true);
         verify(forceMerge("test1", "test2"), true);
-        verify(refresh("test1", "test2"), true);
+        verify(refreshBuilder("test1", "test2"), true);
         verify(validateQuery("test1", "test2"), true);
         verify(aliasExists("test1", "test2"), true);
         verify(typesExists("test1", "test2"), true);
@@ -97,7 +97,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
         verify(segments("test1", "test2").setIndicesOptions(options), true);
         verify(stats("test1", "test2").setIndicesOptions(options), true);
         verify(forceMerge("test1", "test2").setIndicesOptions(options), true);
-        verify(refresh("test1", "test2").setIndicesOptions(options), true);
+        verify(refreshBuilder("test1", "test2").setIndicesOptions(options), true);
         verify(validateQuery("test1", "test2").setIndicesOptions(options), true);
         verify(aliasExists("test1", "test2").setIndicesOptions(options), true);
         verify(typesExists("test1", "test2").setIndicesOptions(options), true);
@@ -118,7 +118,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
         verify(segments("test1", "test2").setIndicesOptions(options), false);
         verify(stats("test1", "test2").setIndicesOptions(options), false);
         verify(forceMerge("test1", "test2").setIndicesOptions(options), false);
-        verify(refresh("test1", "test2").setIndicesOptions(options), false);
+        verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false);
         verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
         verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
         verify(typesExists("test1", "test2").setIndicesOptions(options), false);
@@ -141,7 +141,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
         verify(segments("test1", "test2").setIndicesOptions(options), false);
         verify(stats("test1", "test2").setIndicesOptions(options), false);
         verify(forceMerge("test1", "test2").setIndicesOptions(options), false);
-        verify(refresh("test1", "test2").setIndicesOptions(options), false);
+        verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false);
         verify(validateQuery("test1", "test2").setIndicesOptions(options), false);
         verify(aliasExists("test1", "test2").setIndicesOptions(options), false);
         verify(typesExists("test1", "test2").setIndicesOptions(options), false);
@@ -172,7 +172,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
         verify(segments("test1").setIndicesOptions(options), true);
         verify(stats("test1").setIndicesOptions(options), true);
         verify(forceMerge("test1").setIndicesOptions(options), true);
-        verify(refresh("test1").setIndicesOptions(options), true);
+        verify(refreshBuilder("test1").setIndicesOptions(options), true);
verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); @@ -193,7 +193,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -217,7 +217,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -240,7 +240,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), true); verify(stats("test1").setIndicesOptions(options), true); verify(forceMerge("test1").setIndicesOptions(options), true); - verify(refresh("test1").setIndicesOptions(options), true); + verify(refreshBuilder("test1").setIndicesOptions(options), true); verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); @@ -260,7 +260,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -283,7 +283,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -336,7 +336,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -358,7 +358,7 @@ 
public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices).setIndicesOptions(options), false); verify(stats(indices).setIndicesOptions(options), false); verify(forceMerge(indices).setIndicesOptions(options), false); - verify(refresh(indices).setIndicesOptions(options), false); + verify(refreshBuilder(indices).setIndicesOptions(options), false); verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); verify(typesExists(indices).setIndicesOptions(options), false); @@ -383,7 +383,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), false); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -405,7 +405,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -427,7 +427,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices).setIndicesOptions(options), false); verify(stats(indices).setIndicesOptions(options), false); verify(forceMerge(indices).setIndicesOptions(options), false); - verify(refresh(indices).setIndicesOptions(options), false); + verify(refreshBuilder(indices).setIndicesOptions(options), false); verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); verify(typesExists(indices).setIndicesOptions(options), false); @@ -770,7 +770,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { return client().admin().indices().prepareForceMerge(indices); } - private static RefreshRequestBuilder refresh(String... indices) { + private static RefreshRequestBuilder refreshBuilder(String... indices) { return client().admin().indices().prepareRefresh(indices); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index f5ca9211dff..dcea25617b2 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -432,7 +432,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest index("test-idx-all", "doc", Integer.toString(i), "foo", "bar" + i); index("test-idx-closed", "doc", Integer.toString(i), "foo", "bar" + i); } - refresh(); + refresh("test-idx-closed", "test-idx-all"); // don't refresh test-idx-some it will take 30 sec until it times out... 
assertThat(client().prepareSearch("test-idx-all").setSize(0).get().getHits().totalHits(), equalTo(100L)); assertAcked(client().admin().indices().prepareClose("test-idx-closed")); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 118120ecd73..4463f8066ee 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1233,10 +1233,10 @@ public abstract class ESIntegTestCase extends ESTestCase { * * @see #waitForRelocation() */ - protected final RefreshResponse refresh() { + protected final RefreshResponse refresh(String... indices) { waitForRelocation(); // TODO RANDOMIZE with flush? - RefreshResponse actionGet = client().admin().indices().prepareRefresh().execute().actionGet(); + RefreshResponse actionGet = client().admin().indices().prepareRefresh(indices).execute().actionGet(); assertNoFailures(actionGet); return actionGet; } @@ -1246,7 +1246,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void flushAndRefresh(String... indices) { flush(indices); - refresh(); + refresh(indices); } /** From d027ceb76df4f6c7d72c0716f027c91db1107919 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Tue, 29 Dec 2015 09:47:59 -0600 Subject: [PATCH 275/322] [TEST] Update GeoDistanceQuery to use Lucene's maxRadialDistance Removing maxRadialDistance method from ES GeoUtils in favor of Lucene 5.4 GeoDistanceUtils.maxRadialDistanceMeters. --- .../java/org/elasticsearch/common/geo/GeoUtils.java | 11 ++--------- .../index/query/GeoDistanceRangeQueryBuilder.java | 3 ++- .../index/query/GeoDistanceRangeQueryTests.java | 3 ++- 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index b4aaf5830ca..cec805e7a80 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.geo; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.apache.lucene.util.GeoDistanceUtils; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.unit.DistanceUnit; @@ -65,19 +66,11 @@ public class GeoUtils { /** Earth ellipsoid polar distance in meters */ public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS; - /** Returns the maximum distance/radius from the point 'center' before overlapping */ - public static double maxRadialDistance(GeoPoint center) { - if (Math.abs(center.lat()) == 90.0) { - return SloppyMath.haversin(center.lat(), center.lon(), 0, center.lon())*1000.0; - } - return SloppyMath.haversin(center.lat(), center.lon(), center.lat(), (180.0 + center.lon()) % 360)*1000.0; - } - /** Returns the minimum between the provided distance 'initialRadius' and the * maximum distance/radius from the point 'center' before overlapping **/ public static double maxRadialDistance(GeoPoint center, double initialRadius) { - final double maxRadius = maxRadialDistance(center); + final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(center.lon(), center.lat()); return Math.min(initialRadius, maxRadius); } diff --git 
a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java
index 4f09ff34b61..dc1c3d69817 100644
--- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;

 import org.apache.lucene.search.GeoPointDistanceRangeQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.GeoDistanceUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoDistance;
@@ -263,7 +264,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistanceRangeQueryBuilder>
Date: Tue, 29 Dec 2015 10:07:31 -0700
Subject: [PATCH 276/322] [TEST] Use a longer timeout for plugin manager in test

---
 .../test/java/org/elasticsearch/plugins/PluginManagerTests.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java
index cee98bc0163..bc92f894019 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java
@@ -710,7 +710,7 @@ public class PluginManagerTests extends ESIntegTestCase {
         Channel channel = serverBootstrap.bind(new InetSocketAddress(InetAddress.getByName("localhost"), 0));
         int port = ((InetSocketAddress) channel.getLocalAddress()).getPort();
         // IO_ERROR because there is no real file delivered...
-        assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR);
+        assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 10s", port), ExitStatus.IO_ERROR);

         // ensure that we did not try any other data source like download.elastic.co, in case we specified our own local URL
         assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("download.elastic.co"))));

From d898c0a6b02ebf90648298fdaf972f7c413089a7 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Wed, 30 Dec 2015 03:54:26 +0100
Subject: [PATCH 277/322] Removes irrelevant part of the bw compat test.

---
 .../java/org/elasticsearch/index/mapper/core/TypeParsers.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
index 3ebb4e137e1..3569ab0d723 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
@@ -460,8 +460,7 @@ public class TypeParsers {
     }

     private static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) {
-        if (parserContext.indexVersionCreated().before(Version.V_3_0_0) &&
-                "default".equals(value) && parserContext.getSimilarity(value) == null) {
+        if (parserContext.indexVersionCreated().before(Version.V_3_0_0) && "default".equals(value)) {
             // "default" similarity has been renamed into "classic" in 3.x.
value = SimilarityService.DEFAULT_SIMILARITY;
         }

From 6d3c9b074c02ca3429884f463af95ff553a1adfb Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Wed, 30 Dec 2015 10:11:14 +0100
Subject: [PATCH 278/322] Remove support for the `multi_field` type.

It is officially unsupported since version 1.0.

---
 .../index/mapper/core/TypeParsers.java        |  83 -------
 .../elasticsearch/indices/IndicesModule.java  |   2 -
 .../TokenCountFieldMapperIntegrationIT.java   |   9 +-
 .../mapper/multifield/MultiFieldTests.java    | 222 ------------------
 .../search/highlight/HighlighterSearchIT.java |  51 ++--
 .../suggest/CompletionSuggestSearchIT.java    |  43 ----
 .../mapper/multifield/merge/upgrade1.json     |  11 +-
 .../mapper/multifield/merge/upgrade2.json     |  11 +-
 .../mapper/multifield/merge/upgrade3.json     |   5 +-
 .../multifield/test-multi-field-type.json     |  55 -----
 .../messy/tests/SuggestSearchTests.java       |  28 +--
 .../test/indices.put_mapping/10_basic.yaml    |  11 -
 12 files changed, 41 insertions(+), 490 deletions(-)
 delete mode 100644 core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json

diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
index 3ebba17d65c..30c887dd39b 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
@@ -36,7 +36,6 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.object.ObjectMapper;

-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
@@ -55,88 +54,6 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
 */
 public class TypeParsers {

-    public static final String MULTI_FIELD_CONTENT_TYPE = "multi_field";
-    public static final Mapper.TypeParser multiFieldConverterTypeParser = new Mapper.TypeParser() {
-
-        @Override
-        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            FieldMapper.Builder mainFieldBuilder = null;
-            List<Mapper.Builder> fields = null;
-            String firstType = null;
-
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = Strings.toUnderscoreCase(entry.getKey());
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("fields")) {
-                    Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
-                    for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
-                        Map.Entry<String, Object> entry1 = fieldsIterator.next();
-                        String propName = entry1.getKey();
-                        Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();
-
-                        String type;
-                        Object typeNode = propNode.get("type");
-                        if (typeNode != null) {
-                            type = typeNode.toString();
-                            if (firstType == null) {
-                                firstType = type;
-                            }
-                        } else {
-                            throw new MapperParsingException("no type specified for property [" + propName + "]");
-                        }
-
-                        Mapper.TypeParser typeParser = parserContext.typeParser(type);
-                        if (typeParser == null) {
-                            throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + fieldName + "]");
-                        }
-                        if (propName.equals(name)) {
-                            mainFieldBuilder = (FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
-                            fieldsIterator.remove();
-                        } else {
-                            if (fields == null) {
-                                fields = new ArrayList<>(2);
-                            }
-                            fields.add((FieldMapper.Builder) typeParser.parse(propName, propNode,
parserContext)); - fieldsIterator.remove(); - } - } - fieldsNode.remove("type"); - DocumentMapperParser.checkNoRemainingFields(fieldName, fieldsNode, parserContext.indexVersionCreated()); - iterator.remove(); - } - } - - if (mainFieldBuilder == null) { - if (fields == null) { - // No fields at all were specified in multi_field, so lets return a non indexed string field. - return new StringFieldMapper.Builder(name).index(false); - } - Mapper.TypeParser typeParser = parserContext.typeParser(firstType); - if (typeParser == null) { - // The first multi field's type is unknown - mainFieldBuilder = new StringFieldMapper.Builder(name).index(false); - } else { - Mapper.Builder substitute = typeParser.parse(name, Collections.emptyMap(), parserContext); - if (substitute instanceof FieldMapper.Builder) { - mainFieldBuilder = ((FieldMapper.Builder) substitute).index(false); - } else { - // The first multi isn't a core field type - mainFieldBuilder = new StringFieldMapper.Builder(name).index(false); - } - } - } - - if (fields != null) { - for (Mapper.Builder field : fields) { - mainFieldBuilder.addMultiField(field); - } - } - return mainFieldBuilder; - } - - }; - public static final String DOC_VALUES = "doc_values"; public static final String INDEX_OPTIONS_DOCS = "docs"; public static final String INDEX_OPTIONS_FREQS = "freqs"; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 61210bb0413..ebeca4e235b 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.ShortFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; -import org.elasticsearch.index.mapper.core.TypeParsers; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.internal.AllFieldMapper; @@ -210,7 +209,6 @@ public class IndicesModule extends AbstractModule { registerMapper(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser()); registerMapper(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); registerMapper(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); - registerMapper(TypeParsers.MULTI_FIELD_CONTENT_TYPE, TypeParsers.multiFieldConverterTypeParser); registerMapper(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); registerMapper(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index 8a1b42f862a..facc1eb41ad 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -112,13 +112,10 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { .startObject("test") .startObject("properties") .startObject("foo") - .field("type", "multi_field") + .field("type", "string") + .field("store", storeCountedFields) + .field("analyzer", "simple") .startObject("fields") - .startObject("foo") - 
.field("type", "string") - .field("store", storeCountedFields) - .field("analyzer", "simple") - .endObject() .startObject("token_count") .field("type", "token_count") .field("analyzer", "standard") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 016c3b58144..b9d157fbb4c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -67,10 +67,6 @@ import static org.hamcrest.Matchers.notNullValue; * */ public class MultiFieldTests extends ESSingleNodeTestCase { - public void testMultiFieldMultiFieldType() throws Exception { - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json"); - testMultiField(mapping); - } public void testMultiFieldMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-fields.json"); @@ -186,224 +182,6 @@ public class MultiFieldTests extends ESSingleNodeTestCase { assertEquals(IndexOptions.NONE, f.fieldType().indexOptions()); } - public void testConvertMultiFieldNoDefaultField() throws Exception { - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - assertNull(doc.getField("name")); - IndexableField f = doc.getField("name.indexed"); - assertThat(f.name(), equalTo("name.indexed")); - assertThat(f.stringValue(), equalTo("some name")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("name.not_indexed"); - assertThat(f.name(), equalTo("name.not_indexed")); - assertThat(f.stringValue(), equalTo("some name")); - assertThat(f.fieldType().stored(), equalTo(true)); - assertEquals(IndexOptions.NONE, f.fieldType().indexOptions()); - - assertThat(docMapper.mappers().getMapper("name"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name"), instanceOf(StringFieldMapper.class)); - assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(StringFieldMapper.class)); - assertNotNull(docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(StringFieldMapper.class)); - assertEquals(IndexOptions.NONE, 
docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true)); - - assertNull(doc.getField("age")); - f = doc.getField("age.not_stored"); - assertThat(f.name(), equalTo("age.not_stored")); - assertThat(f.numericValue(), equalTo((Number) 28L)); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("age.stored"); - assertThat(f.name(), equalTo("age.stored")); - assertThat(f.numericValue(), equalTo((Number) 28L)); - assertThat(f.fieldType().stored(), equalTo(true)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - assertThat(docMapper.mappers().getMapper("age"), notNullValue()); - assertThat(docMapper.mappers().getMapper("age"), instanceOf(LongFieldMapper.class)); - assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("age").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("age").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("age").fieldType().tokenized(), equalTo(false)); - - assertThat(docMapper.mappers().getMapper("age.not_stored"), notNullValue()); - assertThat(docMapper.mappers().getMapper("age.not_stored"), instanceOf(LongFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("age.not_stored").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("age.not_stored").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("age.not_stored").fieldType().tokenized(), equalTo(false)); - - assertThat(docMapper.mappers().getMapper("age.stored"), notNullValue()); - assertThat(docMapper.mappers().getMapper("age.stored"), instanceOf(LongFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("age.stored").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("age.stored").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("age.stored").fieldType().tokenized(), equalTo(false)); - } - - public void testConvertMultiFieldGeoPoint() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); - Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - boolean indexCreatedBefore22 = version.before(Version.V_2_2_0); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json"); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - assertThat(docMapper.mappers().getMapper("a"), notNullValue()); - assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("a").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("a").fieldType().tokenized(), equalTo(false)); - - assertThat(docMapper.mappers().getMapper("a.b"), notNullValue()); - assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(BaseGeoPointFieldMapper.class)); - assertNotSame(IndexOptions.NONE, 
docMapper.mappers().getMapper("a.b").fieldType().indexOptions()); - final boolean stored = indexCreatedBefore22 == false; - assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(stored)); - assertThat(docMapper.mappers().getMapper("a.b").fieldType().tokenized(), equalTo(false)); - final boolean hasDocValues = indexCreatedBefore22 == false; - assertThat(docMapper.mappers().getMapper("a.b").fieldType().hasDocValues(), equalTo(hasDocValues)); - - BytesReference json = jsonBuilder().startObject() - .field("a", "-1,-1") - .endObject().bytes(); - Document doc = docMapper.parse("test", "type", "1", json).rootDoc(); - - IndexableField f = doc.getField("a"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("a")); - assertThat(f.stringValue(), equalTo("-1,-1")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("a.b"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("a.b")); - if (indexCreatedBefore22 == true) { - assertThat(f.stringValue(), equalTo("-1.0,-1.0")); - } else { - assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0))); - } - assertThat(f.fieldType().stored(), equalTo(stored)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - assertThat(docMapper.mappers().getMapper("b"), notNullValue()); - assertThat(docMapper.mappers().getMapper("b"), instanceOf(BaseGeoPointFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(stored)); - assertThat(docMapper.mappers().getMapper("b").fieldType().tokenized(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("b").fieldType().hasDocValues(), equalTo(hasDocValues)); - - assertThat(docMapper.mappers().getMapper("b.a"), notNullValue()); - assertThat(docMapper.mappers().getMapper("b.a"), instanceOf(StringFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b.a").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("b.a").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("b.a").fieldType().tokenized(), equalTo(false)); - - json = jsonBuilder().startObject() - .field("b", "-1,-1") - .endObject().bytes(); - doc = docMapper.parse("test", "type", "1", json).rootDoc(); - - f = doc.getField("b"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b")); - if (indexCreatedBefore22 == true) { - assertThat(f.stringValue(), equalTo("-1.0,-1.0")); - } else { - assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0))); - } - assertThat(f.fieldType().stored(), equalTo(stored)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("b.a"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b.a")); - assertThat(f.stringValue(), equalTo("-1,-1")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - } - - public void testConvertMultiFieldCompletion() throws Exception { - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-completion.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - assertThat(docMapper.mappers().getMapper("a"), 
notNullValue()); - assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("a").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("a").fieldType().tokenized(), equalTo(false)); - - assertThat(docMapper.mappers().getMapper("a.b"), notNullValue()); - assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(CompletionFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a.b").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("a.b").fieldType().tokenized(), equalTo(true)); - - BytesReference json = jsonBuilder().startObject() - .field("a", "complete me") - .endObject().bytes(); - Document doc = docMapper.parse("test", "type", "1", json).rootDoc(); - - IndexableField f = doc.getField("a"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("a")); - assertThat(f.stringValue(), equalTo("complete me")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("a.b"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("a.b")); - assertThat(f.stringValue(), equalTo("complete me")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - assertThat(docMapper.mappers().getMapper("b"), notNullValue()); - assertThat(docMapper.mappers().getMapper("b"), instanceOf(CompletionFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("b").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("b.a"), notNullValue()); - assertThat(docMapper.mappers().getMapper("b.a"), instanceOf(StringFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b.a").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("b.a").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("b.a").fieldType().tokenized(), equalTo(false)); - - json = jsonBuilder().startObject() - .field("b", "complete me") - .endObject().bytes(); - doc = docMapper.parse("test", "type", "1", json).rootDoc(); - - f = doc.getField("b"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b")); - assertThat(f.stringValue(), equalTo("complete me")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - - f = doc.getField("b.a"); - assertThat(f, notNullValue()); - assertThat(f.name(), equalTo("b.a")); - assertThat(f.stringValue(), equalTo("complete me")); - assertThat(f.fieldType().stored(), equalTo(false)); - assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); - } - // The underlying order of the fields in multi fields in the mapping source should always be consistent, if not this // can to unnecessary re-syncing of the mappings between the local instance and cluster state public void testMultiFieldsInConsistentOrder() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java 
b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 655dd82071b..41fe4975e4b 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -233,13 +233,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { .field("search_analyzer", "search_autocomplete") .field("term_vector", "with_positions_offsets") .endObject() - .startObject("name") + .endObject() .field("type", "string") .endObject() .endObject() - .field("type", "multi_field") - .endObject() - .endObject() .endObject()) .setSettings(settingsBuilder() .put(indexSettings()) @@ -900,14 +897,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("foo") - .field("type", "multi_field") + .field("type", "string") + .field("termVector", "with_positions_offsets") + .field("store", "yes") + .field("analyzer", "english") .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("termVector", "with_positions_offsets") - .field("store", "yes") - .field("analyzer", "english") - .endObject() .startObject("plain") .field("type", "string") .field("termVector", "with_positions_offsets") @@ -916,14 +910,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .endObject() .endObject() .startObject("bar") - .field("type", "multi_field") + .field("type", "string") + .field("termVector", "with_positions_offsets") + .field("store", "yes") + .field("analyzer", "english") .startObject("fields") - .startObject("bar") - .field("type", "string") - .field("termVector", "with_positions_offsets") - .field("store", "yes") - .field("analyzer", "english") - .endObject() .startObject("plain") .field("type", "string") .field("termVector", "with_positions_offsets") @@ -1194,8 +1185,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1222,8 +1213,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("fields") 
.startObject("key").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1252,8 +1243,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1282,8 +1273,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -2219,8 +2210,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -2251,8 +2242,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() 
.endObject().endObject().endObject())); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index e071d4374c2..fac7f71446a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -601,49 +601,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("r", "Foo Fighters"); } - public void testThatUpgradeToMultiFieldTypeWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "multi_field") - .startObject("fields") - .startObject(FIELD).field("type", "string").endObject() - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - new CompletionSuggestionBuilder("suggs").field(FIELD + ".suggest").text("f").size(10) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder() .startObject() diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json index 6206592afcb..595f62210dd 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json @@ -2,13 +2,10 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"analyzed", + store:"yes", "fields":{ - "name":{ - type:"string", - index:"analyzed", - store:"yes" - }, "indexed":{ type:"string", index:"analyzed" @@ -22,4 +19,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json index 4a8fbf66ced..3cfca9c313e 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json +++ 
b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json @@ -2,13 +2,10 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"analyzed", + store:"yes", "fields":{ - "name":{ - type:"string", - index:"analyzed", - store:"yes" - }, "indexed":{ type:"string", index:"analyzed" @@ -27,4 +24,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json index 9b309789f50..046b0c234d4 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json @@ -2,7 +2,8 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"no", "fields":{ "not_indexed3":{ type:"string", @@ -13,4 +14,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json deleted file mode 100644 index b099b9ab208..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "person":{ - "properties":{ - "name":{ - "type":"multi_field", - "fields":{ - "name":{ - "type":"string", - "index":"analyzed", - "store":"yes" - }, - "indexed":{ - "type":"string", - "index":"analyzed" - }, - "not_indexed":{ - "type":"string", - "index":"no", - "store":"yes" - }, - "test1" : { - "type":"string", - "index":"analyzed", - "store" : "yes", - "fielddata" : { - "loading" : "eager" - } - }, - "test2" : { - "type" : "token_count", - "store" : "yes", - "index" : "not_analyzed", - "analyzer" : "simple" - } - } - }, - "object1":{ - "properties":{ - "multi1":{ - "type":"multi_field", - "fields":{ - "multi1":{ - "type":"date" - }, - "string":{ - "type":"string", - "index":"not_analyzed" - } - } - } - } - } - } - } -} \ No newline at end of file diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index a0699a35534..76d6ee7757b 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -193,11 +193,8 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "multi_field") + .field("type", "string") .startObject("fields") - .startObject("name") - .field("type", "string") - .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") @@ -267,11 +264,8 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "multi_field") + .field("type", "string") .startObject("fields") - .startObject("name") - .field("type", "string") - .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") @@ -808,13 +802,8 @@ public class SuggestSearchTests extends ESIntegTestCase { 
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("name") - .field("type", "multi_field") - .startObject("fields") - .startObject("name") - .field("type", "string") - .field("analyzer", "suggest") - .endObject() - .endObject() + .field("type", "string") + .field("analyzer", "suggest") .endObject() .endObject() .endObject().endObject(); @@ -855,13 +844,8 @@ public class SuggestSearchTests extends ESIntegTestCase { startObject("type1"). startObject("properties"). startObject("name"). - field("type", "multi_field"). - startObject("fields"). - startObject("name"). - field("type", "string"). - field("analyzer", "suggest"). - endObject(). - endObject(). + field("type", "string"). + field("analyzer", "suggest"). endObject(). endObject(). endObject(). diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml index e02b948f936..efdcf15cf89 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml @@ -35,15 +35,6 @@ test_type: properties: text1: - type: multi_field - fields: - text1: - type: string - analyzer: whitespace - text_raw: - type: string - index: not_analyzed - text2: type: string analyzer: whitespace fields: @@ -58,5 +49,3 @@ - match: {test_index.mappings.test_type.properties.text1.type: string} - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.index: not_analyzed} - - match: {test_index.mappings.test_type.properties.text2.type: string} - - match: {test_index.mappings.test_type.properties.text2.fields.text_raw.index: not_analyzed} From 995e796eab99b73c44274e62ee55cf9eba30df30 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 30 Dec 2015 12:07:33 +0100 Subject: [PATCH 279/322] [doc] Fix cross link with ICU plugin Doc bug introduced with #15695 --- .../analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc | 2 +- .../analysis/tokenfilters/cjk-width-tokenfilter.asciidoc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc index b5d1b5cde10..c1e278b2183 100644 --- a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc @@ -3,7 +3,7 @@ The `cjk_bigram` token filter forms bigrams out of the CJK terms that are generated by the <> -or the `icu_tokenizer` (see <>). +or the `icu_tokenizer` (see {plugins}/analysis-icu-tokenizer.html[`analysis-icu` plugin]). By default, when a CJK character has no adjacent characters to form a bigram, it is output in unigram form. If you always want to output both unigrams and diff --git a/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc index 4f5d55d4de1..21bde5509a6 100644 --- a/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc @@ -7,6 +7,6 @@ The `cjk_width` token filter normalizes CJK width differences: * Folds halfwidth Katakana variants into the equivalent Kana NOTE: This token filter can be viewed as a subset of NFKC/NFKD -Unicode normalization. 
See the <> +Unicode normalization. See the {plugins}/analysis-icu-normalization-charfilter.html[`analysis-icu` plugin] for full normalization support. From c4a84b730a89c36aa2bbda59ccae8a80a9460df8 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 30 Dec 2015 17:50:46 +0100 Subject: [PATCH 280/322] Add documentation for Java API create index and put mapping Starting documentation about the admin client. * Create index with settings * put and update mapping. Closes #10816 --- docs/java-api/admin/cluster/index.asciidoc | 10 +++ docs/java-api/admin/index.asciidoc | 18 +++++ .../admin/indices/create-index.asciidoc | 28 +++++++ docs/java-api/admin/indices/index.asciidoc | 16 ++++ .../admin/indices/put-mapping.asciidoc | 76 +++++++++++++++++++ docs/java-api/index.asciidoc | 2 + 6 files changed, 150 insertions(+) create mode 100644 docs/java-api/admin/cluster/index.asciidoc create mode 100644 docs/java-api/admin/index.asciidoc create mode 100644 docs/java-api/admin/indices/create-index.asciidoc create mode 100644 docs/java-api/admin/indices/index.asciidoc create mode 100644 docs/java-api/admin/indices/put-mapping.asciidoc diff --git a/docs/java-api/admin/cluster/index.asciidoc b/docs/java-api/admin/cluster/index.asciidoc new file mode 100644 index 00000000000..395bb794659 --- /dev/null +++ b/docs/java-api/admin/cluster/index.asciidoc @@ -0,0 +1,10 @@ +[[java-admin-cluster]] +=== Cluster Administration + +To access cluster Java API, you need to call `cluster()` method from an <>: + +[source,java] +-------------------------------------------------- +ClusterAdminClient clusterAdminClient = adminClient.cluster(); +-------------------------------------------------- + diff --git a/docs/java-api/admin/index.asciidoc b/docs/java-api/admin/index.asciidoc new file mode 100644 index 00000000000..41599a82c7b --- /dev/null +++ b/docs/java-api/admin/index.asciidoc @@ -0,0 +1,18 @@ +[[java-admin]] +== Java API Administration + +Elasticsearch provides a full Java API to deal with administration tasks. + +To access them, you need to call `admin()` method from a client to get an `AdminClient`: + +[source,java] +-------------------------------------------------- +AdminClient adminClient = client.admin(); +-------------------------------------------------- + +[NOTE] +In the rest of this guide, we will use `client.admin()`. + +include::indices/index.asciidoc[] + +include::cluster/index.asciidoc[] diff --git a/docs/java-api/admin/indices/create-index.asciidoc b/docs/java-api/admin/indices/create-index.asciidoc new file mode 100644 index 00000000000..34b776bd04e --- /dev/null +++ b/docs/java-api/admin/indices/create-index.asciidoc @@ -0,0 +1,28 @@ +[[java-admin-indices-create-index]] +==== Create Index + +Using an <>, you can create an index with all default settings and no mapping: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter").get(); +-------------------------------------------------- + +[float] +[[java-admin-indices-create-index-settings]] +===== Index Settings + +Each index created can have specific settings associated with it. 
+ +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter") + .setSettings(Settings.builder() <1> + .put("index.number_of_shards", 3) + .put("index.number_of_replicas", 2) + ) + .get(); <2> +-------------------------------------------------- +<1> Settings for this index +<2> Execute the action and wait for the result + diff --git a/docs/java-api/admin/indices/index.asciidoc b/docs/java-api/admin/indices/index.asciidoc new file mode 100644 index 00000000000..bb9996539ac --- /dev/null +++ b/docs/java-api/admin/indices/index.asciidoc @@ -0,0 +1,16 @@ +[[java-admin-indices]] +=== Indices Administration + +To access indices Java API, you need to call `indices()` method from an <>: + +[source,java] +-------------------------------------------------- +IndicesAdminClient indicesAdminClient = client.admin().indices(); +-------------------------------------------------- + +[NOTE] +In the rest of this guide, we will use `client.admin().indices()`. + +include::create-index.asciidoc[] + +include::put-mapping.asciidoc[] diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc new file mode 100644 index 00000000000..9b085975077 --- /dev/null +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -0,0 +1,76 @@ +[[java-admin-indices-put-mapping]] +==== Put Mapping + +The PUT mapping API allows you to add a new type while creating an index: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter") <1> + .addMapping("tweet", "{\n" + <2> + " \"tweet\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }") + .get(); +-------------------------------------------------- +<1> <> called `twitter` +<2> It also adds a `tweet` mapping type. + + +The PUT mapping API also allows to add a new type to an existing index: + +[source,java] +-------------------------------------------------- +client.admin().indices().preparePutMapping("twitter") <1> + .setType("user") <2> + .setSource("{\n" + <3> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + "}") + .get(); + +// You can also provide the type in the source document +client.admin().indices().preparePutMapping("twitter") + .setType("user") + .setSource("{\n" + + " \"user\":{\n" + <4> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}") + .get(); +-------------------------------------------------- +<1> Puts a mapping on existing index called `twitter` +<2> Adds a `user` mapping type. +<3> This `user` has a predefined type +<4> type can be also provided within the source + +You can use the same API to update an existing mapping: + +[source,java] +-------------------------------------------------- +client.admin().indices().preparePutMapping("twitter") <1> + .setType("tweet") <2> + .setSource("{\n" + <3> + " \"properties\": {\n" + + " \"user_name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + "}") + .get(); +-------------------------------------------------- +<1> Puts a mapping on existing index called `twitter` +<2> Updates the `user` mapping type. 
+<3> This `user` has now a new field `user_name` + diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 16403d5c147..012633f1e4b 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -147,3 +147,5 @@ include::percolate.asciidoc[] include::query-dsl.asciidoc[] include::indexed-scripts.asciidoc[] + +include::admin/index.asciidoc[] From 157696583169e53c116757167180be0ffdafedce Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 30 Dec 2015 18:12:17 +0100 Subject: [PATCH 281/322] Fix dynamic mapping corner case. Today we fail if the field exists in another type and multi fields are defined in a template. --- .../index/mapper/FieldMapper.java | 4 +- .../index/mapper/DynamicMappingTests.java | 48 +++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9bf58f6107f..b81fdd40c60 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -362,7 +362,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable { public FieldMapper updateFieldType(Map fullNameToFieldType) { final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name()); if (newFieldType == null) { - throw new IllegalStateException(); + // this field does not exist in the mappings yet + // this can happen if this mapper represents a mapping update + return this; } else if (fieldType.getClass() != newFieldType.getClass()) { throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 96e5a2fe80e..22a10ab8229 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -461,6 +461,54 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { } } + public void testMixTemplateMultiFieldAndMappingReuse() throws Exception { + IndexService indexService = createIndex("test"); + XContentBuilder mappings1 = jsonBuilder().startObject() + .startObject("type1") + .startArray("dynamic_templates") + .startObject() + .startObject("template1") + .field("match_mapping_type", "string") + .startObject("mapping") + .field("type", "string") + .startObject("fields") + .startObject("raw") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endArray() + .endObject().endObject(); + indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), true, false); + XContentBuilder mappings2 = jsonBuilder().startObject() + .startObject("type2") + .startObject("properties") + .startObject("field") + .field("type", "string") + .endObject() + .endObject() + .endObject().endObject(); + indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), true, false); + + XContentBuilder json = XContentFactory.jsonBuilder().startObject() + .field("field", "foo") + .endObject(); + SourceToParse source = SourceToParse.source(json.bytes()).id("1"); + DocumentMapper mapper = indexService.mapperService().documentMapper("type1"); + 
assertNull(mapper.mappers().getMapper("field.raw")); + ParsedDocument parsed = mapper.parse(source); + assertNotNull(parsed.dynamicMappingsUpdate()); + + indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), false, false); + mapper = indexService.mapperService().documentMapper("type1"); + assertNotNull(mapper.mappers().getMapper("field.raw")); + parsed = mapper.parse(source); + assertNull(parsed.dynamicMappingsUpdate()); + } + public void testDefaultFloatingPointMappings() throws IOException { DocumentMapper mapper = createIndex("test").mapperService().documentMapperWithAutoCreate("type").getDocumentMapper(); doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder()); From a4df067524f91cce3cd5ccda570276145519d0bb Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 30 Dec 2015 18:27:06 +0100 Subject: [PATCH 282/322] Fixes test with wrong similarity type (bm25 => BM25) --- .../index/mapper/update/all_mapping_update_with_conflicts.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json index bbeafa7edc7..6ddde341fc2 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json @@ -10,7 +10,7 @@ "omit_norms": false, "analyzer": "whitespace", "search_analyzer": "standard", - "similarity": "bm25", + "similarity": "BM25", "fielddata": { "format": "paged_bytes" } From cce600ae57b2c3cda00c7a5d91ac4314d4ab41b4 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 30 Dec 2015 19:12:03 +0100 Subject: [PATCH 283/322] Update test which assumes that an unknown similarity type is accepted. --- .../elasticsearch/index/mapper/all/SimpleAllMapperTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 341ba25fd9f..ae1560d6608 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -258,7 +258,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } tv_stored |= tv_positions || tv_payloads || tv_offsets; if (randomBoolean()) { - mappingBuilder.field("similarity", similarity = randomBoolean() ? "BM25" : "TF/IDF"); + mappingBuilder.field("similarity", similarity = randomBoolean() ? 
"BM25" : null); } mappingBuilder.endObject(); } @@ -296,7 +296,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } else { assertThat(field, nullValue()); } - if (similarity == null || similarity.equals("TF/IDF")) { + if (similarity == null) { assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue()); } else { assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name())); From 4986817c6d9401585b6931a84044b0815c60be3a Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 30 Dec 2015 19:55:47 +0100 Subject: [PATCH 284/322] Fix NPE --- .../elasticsearch/index/mapper/all/SimpleAllMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index ae1560d6608..acd0cde76d6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -258,7 +258,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } tv_stored |= tv_positions || tv_payloads || tv_offsets; if (randomBoolean()) { - mappingBuilder.field("similarity", similarity = randomBoolean() ? "BM25" : null); + mappingBuilder.field("similarity", similarity = "BM25"); } mappingBuilder.endObject(); } From 3f9c0fbb58ad2120cbc800d424f4365be287e349 Mon Sep 17 00:00:00 2001 From: Dave Date: Mon, 14 Dec 2015 09:07:38 -0800 Subject: [PATCH 285/322] MapperService: check index.mapper.dynamic during index creation The MapperService doesn't currently check the index.mapper.dynamic setting during index creation, so indices can be created with dynamic mappings even if this setting is false. Add a check that throws an exception in this case. 
Fixes #15381 --- .../action/support/AutoCreateIndex.java | 5 +- .../index/mapper/MapperService.java | 4 +- .../mapper/DynamicMappingDisabledTests.java | 114 ++++++++++++++++++ .../indices/settings/GetSettingsBlocksIT.java | 5 +- 4 files changed, 124 insertions(+), 4 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index 7d5fe3aa023..93c96b24be3 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MapperService; /** * Encapsulates the logic of whether a new index should be automatically created when @@ -35,6 +36,7 @@ public final class AutoCreateIndex { private final boolean needToCheck; private final boolean globallyDisabled; + private final boolean dynamicMappingDisabled; private final String[] matches; private final String[] matches2; private final IndexNameExpressionResolver resolver; @@ -42,6 +44,7 @@ public final class AutoCreateIndex { @Inject public AutoCreateIndex(Settings settings, IndexNameExpressionResolver resolver) { this.resolver = resolver; + dynamicMappingDisabled = !settings.getAsBoolean(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_DEFAULT); String value = settings.get("action.auto_create_index"); if (value == null || Booleans.isExplicitTrue(value)) { needToCheck = true; @@ -82,7 +85,7 @@ public final class AutoCreateIndex { if (exists) { return false; } - if (globallyDisabled) { + if (globallyDisabled || dynamicMappingDisabled) { return false; } // matches not set, default value of "true" diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 59724913bad..5da06f2901f 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -78,6 +78,8 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; public class MapperService extends AbstractIndexComponent implements Closeable { public static final String DEFAULT_MAPPING = "_default_"; + public static final String INDEX_MAPPER_DYNAMIC_SETTING = "index.mapper.dynamic"; + public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" @@ -124,7 +126,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); this.mapperRegistry = mapperRegistry; - this.dynamic = this.indexSettings.getSettings().getAsBoolean("index.mapper.dynamic", true); + this.dynamic = this.indexSettings.getSettings().getAsBoolean(INDEX_MAPPER_DYNAMIC_SETTING, INDEX_MAPPER_DYNAMIC_DEFAULT); defaultPercolatorMappingSource = "{\n" + "\"_default_\":{\n" + "\"properties\" : {\n" + diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java new file mode 100644 index 00000000000..f6cfcec041a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.local.LocalTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.test.cluster.TestClusterService; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import static org.hamcrest.CoreMatchers.instanceOf; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { + + private static ThreadPool THREAD_POOL; + private TestClusterService clusterService; + private LocalTransport transport; + private TransportService transportService; + private IndicesService indicesService; + private ShardStateAction shardStateAction; + private ActionFilters actionFilters; + private IndexNameExpressionResolver indexNameExpressionResolver; + private AutoCreateIndex autoCreateIndex; + private Settings settings; + + @BeforeClass + public static void createThreadPool() { + THREAD_POOL = new ThreadPool("DynamicMappingDisabledTests"); + } + + @Override + public void setUp() throws Exception { + super.setUp(); + settings = Settings.builder() + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false) + .build(); + clusterService = new TestClusterService(THREAD_POOL); + transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry()); + transportService = new TransportService(transport, THREAD_POOL); + indicesService = getInstanceFromNode(IndicesService.class); + shardStateAction = new 
ShardStateAction(settings, clusterService, transportService, null, null); + actionFilters = new ActionFilters(Collections.emptySet()); + indexNameExpressionResolver = new IndexNameExpressionResolver(settings); + autoCreateIndex = new AutoCreateIndex(settings, indexNameExpressionResolver); + } + + @AfterClass + public static void destroyThreadPool() { + ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); + // since static must set to null to be eligible for collection + THREAD_POOL = null; + } + + public void testDynamicDisabled() { + TransportIndexAction action = new TransportIndexAction(settings, transportService, clusterService, + indicesService, THREAD_POOL, shardStateAction, null, null, actionFilters, indexNameExpressionResolver, + autoCreateIndex); + + IndexRequest request = new IndexRequest("index", "type", "1"); + request.source("foo", 3); + final AtomicBoolean onFailureCalled = new AtomicBoolean(); + + action.execute(request, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + fail("Indexing request should have failed"); + } + + @Override + public void onFailure(Throwable e) { + onFailureCalled.set(true); + assertThat(e, instanceOf(IndexNotFoundException.class)); + assertEquals(e.getMessage(), "no such index"); + } + }); + + assertTrue(onFailureCalled.get()); + } +} diff --git a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java index a2a7c5fb493..4886ee0886b 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.indices.settings; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -40,7 +41,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) .put("index.merge.policy.expunge_deletes_allowed", "30") - .put("index.mapper.dynamic", false))); + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false))); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { @@ -49,7 +50,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { assertThat(response.getIndexToSettings().size(), greaterThanOrEqualTo(1)); assertThat(response.getSetting("test", "index.refresh_interval"), equalTo("-1")); assertThat(response.getSetting("test", "index.merge.policy.expunge_deletes_allowed"), equalTo("30")); - assertThat(response.getSetting("test", "index.mapper.dynamic"), equalTo("false")); + assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING), equalTo("false")); } finally { disableIndexBlock("test", block); } From e80f50bca8f11c99098a1e65ee81c74eaed1a425 Mon Sep 17 00:00:00 2001 From: mysqlguru Date: Thu, 31 Dec 2015 15:16:56 +0900 Subject: [PATCH 286/322] field name is changed `exact_value` is changed to `full_text` at the second (3) example. 
--- docs/reference/query-dsl/term-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/term-query.asciidoc b/docs/reference/query-dsl/term-query.asciidoc index a34ae5b0a68..85608ca3aa5 100644 --- a/docs/reference/query-dsl/term-query.asciidoc +++ b/docs/reference/query-dsl/term-query.asciidoc @@ -137,7 +137,7 @@ GET my_index/my_type/_search { "query": { "term": { - "exact_value": "foxes" <3> + "full_text": "foxes" <3> } } } From d5191518ba37fa80aa6581c64bad2660286a2c71 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 31 Dec 2015 12:10:48 +0100 Subject: [PATCH 287/322] Fix thirdPartyAudit check on Windows in case of jar hell with JDK --- .../elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 3dda6b186ac..5d06103789f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -244,7 +244,7 @@ public class ThirdPartyAuditTask extends AntTask { Files.walkFileTree(root, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - String entry = root.relativize(file).toString(); + String entry = root.relativize(file).toString().replace('\\', '/'); if (entry.endsWith(".class")) { if (ext.getResource(entry) != null) { sheistySet.add(entry); From 20d198fd07ad9782617626d913d704e3d1a8b62a Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 31 Dec 2015 14:26:30 +0100 Subject: [PATCH 288/322] Add documentation for Java API update/get settings API Closes #10941. 
--- .../admin/indices/get-settings.asciidoc | 22 +++++++++++++++++++ docs/java-api/admin/indices/index.asciidoc | 3 +++ .../admin/indices/update-settings.asciidoc | 16 ++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 docs/java-api/admin/indices/get-settings.asciidoc create mode 100644 docs/java-api/admin/indices/update-settings.asciidoc diff --git a/docs/java-api/admin/indices/get-settings.asciidoc b/docs/java-api/admin/indices/get-settings.asciidoc new file mode 100644 index 00000000000..844aaf65ec9 --- /dev/null +++ b/docs/java-api/admin/indices/get-settings.asciidoc @@ -0,0 +1,22 @@ +[[java-admin-indices-get-settings]] +==== Get Settings + +The get settings API allows to retrieve settings of index/indices: + +[source,java] +-------------------------------------------------- +GetSettingsResponse response = client.admin().indices() + .prepareGetSettings("company", "employee").get(); <1> +for (ObjectObjectCursor cursor : response.getIndexToSettings()) { <2> + String index = cursor.key; <3> + Settings settings = cursor.value; <4> + Integer shards = settings.getAsInt("index.number_of_shards", null); <5> + Integer replicas = settings.getAsInt("index.number_of_replicas", null); <6> +} +-------------------------------------------------- +<1> Get settings for indices `company` and `employee` +<2> Iterate over results +<3> Index name +<4> Settings for the given index +<5> Number of shards for this index +<6> Number of replicas for this index diff --git a/docs/java-api/admin/indices/index.asciidoc b/docs/java-api/admin/indices/index.asciidoc index bb9996539ac..83645e6c3c2 100644 --- a/docs/java-api/admin/indices/index.asciidoc +++ b/docs/java-api/admin/indices/index.asciidoc @@ -14,3 +14,6 @@ In the rest of this guide, we will use `client.admin().indices()`. include::create-index.asciidoc[] include::put-mapping.asciidoc[] + +include::get-settings.asciidoc[] +include::update-settings.asciidoc[] diff --git a/docs/java-api/admin/indices/update-settings.asciidoc b/docs/java-api/admin/indices/update-settings.asciidoc new file mode 100644 index 00000000000..9c2cba2adf0 --- /dev/null +++ b/docs/java-api/admin/indices/update-settings.asciidoc @@ -0,0 +1,16 @@ +[[java-admin-indices-update-settings]] +==== Update Indices Settings + +You can change index settings by calling: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareUpdateSettings("twitter") <1> + .setSettings(Settings.builder() <2> + .put("index.number_of_replicas", 0) + ) + .get(); +-------------------------------------------------- +<1> Index to update +<2> Settings + From 76c3797fd28de8b85062be453555b3ec6d14becd Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 30 Dec 2015 19:08:47 +0100 Subject: [PATCH 289/322] Add documentation for Java API refresh API Closes #10942. 
--- docs/java-api/admin/indices/index.asciidoc | 2 ++ docs/java-api/admin/indices/refresh.asciidoc | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 docs/java-api/admin/indices/refresh.asciidoc diff --git a/docs/java-api/admin/indices/index.asciidoc b/docs/java-api/admin/indices/index.asciidoc index 83645e6c3c2..bbd365076c7 100644 --- a/docs/java-api/admin/indices/index.asciidoc +++ b/docs/java-api/admin/indices/index.asciidoc @@ -15,5 +15,7 @@ include::create-index.asciidoc[] include::put-mapping.asciidoc[] +include::refresh.asciidoc[] + include::get-settings.asciidoc[] include::update-settings.asciidoc[] diff --git a/docs/java-api/admin/indices/refresh.asciidoc b/docs/java-api/admin/indices/refresh.asciidoc new file mode 100644 index 00000000000..856c270daf3 --- /dev/null +++ b/docs/java-api/admin/indices/refresh.asciidoc @@ -0,0 +1,19 @@ +[[java-admin-indices-refresh]] +==== Refresh + +The refresh API allows to explicitly refresh one or more index: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareRefresh().get(); <1> +client.admin().indices() + .prepareRefresh("twitter") <2> + .get(); +client.admin().indices() + .prepareRefresh("twitter", "company") <3> + .get(); +-------------------------------------------------- +<1> Refresh all indices +<2> Refresh one index +<3> Refresh many indices + From 76ab9bf6c98bd3bdbccc52c96e4aeb04ba969261 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 30 Dec 2015 19:29:21 +0100 Subject: [PATCH 290/322] Add documentation for Java API health API Closes #10818. --- docs/java-api/admin/cluster/health.asciidoc | 76 +++++++++++++++++++++ docs/java-api/admin/cluster/index.asciidoc | 7 +- 2 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 docs/java-api/admin/cluster/health.asciidoc diff --git a/docs/java-api/admin/cluster/health.asciidoc b/docs/java-api/admin/cluster/health.asciidoc new file mode 100644 index 00000000000..7d20fdde6a3 --- /dev/null +++ b/docs/java-api/admin/cluster/health.asciidoc @@ -0,0 +1,76 @@ +[[java-admin-cluster-health]] +==== Cluster Health + +[[java-admin-cluster-health-health]] +===== Health + +The cluster health API allows to get a very simple status on the health of the cluster and also can give you +some technical information about the cluster status per index: + +[source,java] +-------------------------------------------------- +ClusterHealthResponse healths = client.admin().cluster().prepareHealth().get(); <1> +String clusterName = healths.getClusterName(); <2> +int numberOfDataNodes = healths.getNumberOfDataNodes(); <3> +int numberOfNodes = healths.getNumberOfNodes(); <4> + +for (ClusterIndexHealth health : healths) { <5> + String index = health.getIndex(); <6> + int numberOfShards = health.getNumberOfShards(); <7> + int numberOfReplicas = health.getNumberOfReplicas(); <8> + ClusterHealthStatus status = health.getStatus(); <9> +} +-------------------------------------------------- +<1> Get information for all indices +<2> Access the cluster name +<3> Get the total number of data nodes +<4> Get the total number of nodes +<5> Iterate over all indices +<6> Index name +<7> Number of shards +<8> Number of replicas +<9> Index status + +[[java-admin-cluster-health-wait-status]] +===== Wait for status + +You can use the cluster health API to wait for a specific status for the whole cluster or for a given index: + +[source,java] +-------------------------------------------------- +client.admin().cluster().prepareHealth() <1> + 
.setWaitForYellowStatus() <2> + .get(); +client.admin().cluster().prepareHealth("company") <3> + .setWaitForGreenStatus() <4> + .get(); + +client.admin().cluster().prepareHealth("employee") <5> + .setWaitForGreenStatus() <6> + .setTimeout(TimeValue.timeValueSeconds(2)) <7> + .get(); +-------------------------------------------------- +<1> Prepare a health request +<2> Wait for the cluster being yellow +<3> Prepare the health request for index `company` +<4> Wait for the index being green +<5> Prepare the health request for index `employee` +<6> Wait for the index being green +<7> Wait at most for 2 seconds + +If the index does not have the expected status and you want to fail in that case, you need +to explicitly interpret the result: + +[source,java] +-------------------------------------------------- +ClusterHealthResponse response = client.admin().cluster().prepareHealth("company") + .setWaitForGreenStatus() <1> + .get(); + +ClusterHealthStatus status = response.getIndices().get("company").getStatus(); +if (!status.equals(ClusterHealthStatus.GREEN)) { + throw new RuntimeException("Index is in " + status + " state"); <2> +} +-------------------------------------------------- +<1> Wait for the index being green +<2> Throw an exception if not `GREEN` diff --git a/docs/java-api/admin/cluster/index.asciidoc b/docs/java-api/admin/cluster/index.asciidoc index 395bb794659..e5525e18a49 100644 --- a/docs/java-api/admin/cluster/index.asciidoc +++ b/docs/java-api/admin/cluster/index.asciidoc @@ -5,6 +5,11 @@ To access cluster Java API, you need to call `cluster()` method from an < Date: Tue, 29 Dec 2015 16:09:00 -0600 Subject: [PATCH 291/322] Fix multi-field support for GeoPoint types This commit fixes multiField support for GeoPointFieldMapper by passing an externalValueContext to the multiField parser. Unit testing is added for multi field coverage. 
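As a sketch of what this enables (index and field names are illustrative, following the shape of the new unit test), a `geo_point` field can now declare multi fields that actually receive the parsed point:

[source,java]
--------------------------------------------------
// Sketch only: a geo_point with geohash and lat_lon sub-fields.
XContentBuilder mapping = jsonBuilder().startObject()
        .startObject("pin").startObject("properties").startObject("location")
            .field("type", "geo_point")
            .startObject("fields")
                .startObject("geohash")
                    .field("type", "geo_point").field("geohash_prefix", true)
                .endObject()
                .startObject("latlon")
                    .field("type", "geo_point").field("lat_lon", true)
                .endObject()
            .endObject()
        .endObject().endObject().endObject()
    .endObject();
client.admin().indices().prepareCreate("test").addMapping("pin", mapping).get();
--------------------------------------------------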
--- .../mapper/geo/BaseGeoPointFieldMapper.java | 2 +- .../mapper/geo/GeoPointFieldMapperTests.java | 30 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 11683ee9a8d..52202fac716 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -412,7 +412,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr latMapper.parse(context.createExternalValueContext(point.lat())); lonMapper.parse(context.createExternalValueContext(point.lon())); } - multiFields.parse(this, context); + multiFields.parse(this, context.createExternalValueContext(point)); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index af39f45fa8d..d89e27ffb69 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -37,6 +38,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.geo.RandomGeoGenerator; import java.util.List; import java.util.Map; @@ -787,4 +789,32 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { assertEquals("dr5regy6rc6y".substring(0, numHashes-i), hashes.get(i)); } } + + public void testMultiField() throws Exception { + int numDocs = randomIntBetween(10, 100); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") + .field("type", "geo_point").startObject("fields") + .startObject("geohash").field("type", "geo_point").field("geohash_precision", 12).field("geohash_prefix", true).endObject() + .startObject("latlon").field("type", "geo_point").field("lat_lon", true).endObject().endObject() + .endObject().endObject().endObject().endObject().string(); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test") + .addMapping("pin", mapping); + mappingRequest.execute().actionGet(); + + // create index and add random test points + client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); + for (int i=0; i Date: Thu, 31 Dec 2015 10:56:04 -0600 Subject: [PATCH 292/322] Reconcile GeoPoint toString and fromString methods GeoPoint.toString prints as a json array of values, but resetFromString expects comma delimited. This commit reconciles the methods. 
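A minimal sketch of the round trip that this makes symmetric:

[source,java]
--------------------------------------------------
GeoPoint point = new GeoPoint(51.0, 19.0);
String latLon = point.toString();                       // now "51.0, 19.0" rather than "[51.0, 19.0]"
GeoPoint copy = new GeoPoint().resetFromString(latLon); // accepts the comma delimited form
assert point.equals(copy);
--------------------------------------------------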
--- .../main/java/org/elasticsearch/common/geo/GeoPoint.java | 2 +- .../elasticsearch/index/mapper/core/StringFieldMapper.java | 2 +- .../index/mapper/multifield/MultiFieldsIntegrationIT.java | 6 ++++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 7130537fceb..513a7977d67 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -146,7 +146,7 @@ public final class GeoPoint { @Override public String toString() { - return "[" + lat + ", " + lon + "]"; + return lat + ", " + lon; } public static GeoPoint parseFromLatLon(String latLon) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index d0cb0b4dd1c..cf24e044f0f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -338,7 +338,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc */ public static ValueAndBoost parseCreateFieldForString(ParseContext context, String nullValue, float defaultBoost) throws IOException { if (context.externalValueSet()) { - return new ValueAndBoost((String) context.externalValue(), defaultBoost); + return new ValueAndBoost(context.externalValue().toString(), defaultBoost); } XContentParser parser = context.parser(); if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java index 0c26324ac6c..e4892583cf8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.multifield; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -121,12 +122,13 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { assertThat(bField.get("type").toString(), equalTo("string")); assertThat(bField.get("index").toString(), equalTo("not_analyzed")); - client().prepareIndex("my-index", "my-type", "1").setSource("a", "51,19").setRefresh(true).get(); + GeoPoint point = new GeoPoint(51, 19); + client().prepareIndex("my-index", "my-type", "1").setSource("a", point.toString()).setRefresh(true).get(); SearchResponse countResponse = client().prepareSearch("my-index").setSize(0) .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS))) .get(); assertThat(countResponse.getHits().totalHits(), equalTo(1l)); - countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "51,19")).get(); + countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.toString())).get(); 
assertThat(countResponse.getHits().totalHits(), equalTo(1l)); } From 61b701402390337cce2d1192a756e94d7ca89325 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 1 Jan 2016 19:52:24 -0500 Subject: [PATCH 293/322] Null parameter checks in BulkProcessor.Builder#builder --- .../java/org/elasticsearch/action/bulk/BulkProcessor.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index c4d2cd6af2d..78a0c76702f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.FutureUtils; import java.io.Closeable; +import java.util.Objects; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -167,9 +168,8 @@ public class BulkProcessor implements Closeable { } public static Builder builder(Client client, Listener listener) { - if (client == null) { - throw new NullPointerException("The client you specified while building a BulkProcessor is null"); - } + Objects.requireNonNull(client, "client"); + Objects.requireNonNull(listener, "listener"); return new Builder(client, listener); } From 265f8f85120ce618862b3aa97fc3cc3c97a4c35e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 2 Jan 2016 18:38:40 -0500 Subject: [PATCH 294/322] Address unchecked warnings in o/e/c/c/Cache.java --- core/src/main/java/org/elasticsearch/common/cache/Cache.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/cache/Cache.java b/core/src/main/java/org/elasticsearch/common/cache/Cache.java index a5b071c848b..0060a71f974 100644 --- a/core/src/main/java/org/elasticsearch/common/cache/Cache.java +++ b/core/src/main/java/org/elasticsearch/common/cache/Cache.java @@ -296,7 +296,7 @@ public class Cache { } public static final int NUMBER_OF_SEGMENTS = 256; - private final CacheSegment[] segments = new CacheSegment[NUMBER_OF_SEGMENTS]; + @SuppressWarnings("unchecked") private final CacheSegment[] segments = new CacheSegment[NUMBER_OF_SEGMENTS]; { for (int i = 0; i < segments.length; i++) { @@ -432,7 +432,7 @@ public class Cache { promote(tuple.v1(), now); } if (replaced) { - removalListener.onRemoval(new RemovalNotification(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED)); + removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED)); } } From 110fe6d36abf590f69cc592a01816e9e312e70b7 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 3 Jan 2016 11:44:03 -0500 Subject: [PATCH 295/322] Remove unused constant from o.e.c.n.DiscoveryNode --- .../elasticsearch/cluster/node/DiscoveryNode.java | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 9f0a33ec4cf..7dce2172879 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -45,12 +45,6 @@ import static org.elasticsearch.common.transport.TransportAddressSerializers.add */ public 
class DiscoveryNode implements Streamable, ToXContent { - /** - * Minimum version of a node to communicate with. This version corresponds to the minimum compatibility version - * of the current elasticsearch major version. - */ - public static final Version MINIMUM_DISCOVERY_NODE_VERSION = Version.CURRENT.minimumCompatibilityVersion(); - public static boolean localNode(Settings settings) { if (settings.get("node.local") != null) { return settings.getAsBoolean("node.local", false); @@ -109,7 +103,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode} *
<p>
        - * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered * and updated. @@ -126,7 +120,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode} *
<p>
        - * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered * and updated. @@ -145,7 +139,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode}. *
<p>
        - * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered * and updated. @@ -178,7 +172,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode}. *
<p>
        - * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered * and updated. From 6a12b5e59a14681847b33091612bcf3f136e07de Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 3 Jan 2016 12:30:24 -0500 Subject: [PATCH 296/322] Cleanup o/e/c/ClusterStateObserver.java This commit applies a minor code cleanup to o/e/c/ClusterStateObserver.java. In particular - employ the diamond operator instead of explicitly specifying a generic type parameter - use 'L' instead of 'l' for specifying a long literal - remove redundant static modifier on a nested interface - remove redundant public access modifiers on interface methods - reformat the declaration of the four-argument ChangePredicate#apply - simplify the bodies of ValidationPredicate#apply --- .../cluster/ClusterStateObserver.java | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index a035cf7c368..df857623570 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -50,7 +50,7 @@ public class ClusterStateObserver { final AtomicReference lastObservedState; final TimeoutClusterStateListener clusterStateListener = new ObserverClusterStateListener(); // observingContext is not null when waiting on cluster state changes - final AtomicReference observingContext = new AtomicReference(null); + final AtomicReference observingContext = new AtomicReference<>(null); volatile Long startTimeNS; volatile boolean timedOut; @@ -117,7 +117,7 @@ public class ClusterStateObserver { if (timeOutValue != null) { long timeSinceStartMS = TimeValue.nsecToMSec(System.nanoTime() - startTimeNS); timeoutTimeLeftMS = timeOutValue.millis() - timeSinceStartMS; - if (timeoutTimeLeftMS <= 0l) { + if (timeoutTimeLeftMS <= 0L) { // things have timeout while we were busy -> notify logger.trace("observer timed out. notifying listener. 
timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry @@ -238,7 +238,7 @@ public class ClusterStateObserver { } } - public static interface Listener { + public interface Listener { /** called when a new state is observed */ void onNewClusterState(ClusterState state); @@ -256,15 +256,17 @@ public class ClusterStateObserver { * * @return true if newState should be accepted */ - public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, - ClusterState newState, ClusterState.ClusterStateStatus newStatus); + boolean apply(ClusterState previousState, + ClusterState.ClusterStateStatus previousStatus, + ClusterState newState, + ClusterState.ClusterStateStatus newStatus); /** * called to see whether a cluster change should be accepted * * @return true if changedEvent.state() should be accepted */ - public boolean apply(ClusterChangedEvent changedEvent); + boolean apply(ClusterChangedEvent changedEvent); } @@ -272,20 +274,14 @@ public class ClusterStateObserver { @Override public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState, ClusterState.ClusterStateStatus newStatus) { - if (previousState != newState || previousStatus != newStatus) { - return validate(newState); - } - return false; + return (previousState != newState || previousStatus != newStatus) && validate(newState); } protected abstract boolean validate(ClusterState newState); @Override public boolean apply(ClusterChangedEvent changedEvent) { - if (changedEvent.previousState().version() != changedEvent.state().version()) { - return validate(changedEvent.state()); - } - return false; + return changedEvent.previousState().version() != changedEvent.state().version() && validate(changedEvent.state()); } } From a70f76f76305e5bdc8161f9836d6850f75442aec Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 3 Jan 2016 18:18:07 -0500 Subject: [PATCH 297/322] Make cluster state external to o.e.c.a.s.ShardStateAction This commit modifies the handling of cluster states in o.e.c.a.s.ShardStateAction so that all necessary state is obtained externally to the ShardStateAction#shardFailed and ShardStateAction#shardStarted methods. This refactoring permits the removal of the ClusterService field from ShardStateAction. 
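A minimal sketch of the resulting call pattern, based on the changes below (the surrounding variables are assumed to be in scope at the caller):

[source,java]
--------------------------------------------------
// Callers now pass in the cluster state they observed instead of
// ShardStateAction reading it from an injected ClusterService.
ClusterState state = event.state(); // assumption: state captured by the caller
shardStateAction.shardStarted(state, shardRouting, indexMetaData.getIndexUUID(),
        "after recovery from store");
--------------------------------------------------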
--- .../TransportReplicationAction.java | 2 +- .../action/shard/ShardStateAction.java | 102 +++++++++--------- .../cluster/IndicesClusterStateService.java | 17 ++- .../action/shard/ShardStateActionTests.java | 6 +- 4 files changed, 66 insertions(+), 61 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d96ec177366..80ac93e981b 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -882,7 +882,7 @@ public abstract class TransportReplicationAction { + private final ClusterService clusterService; + + public ShardFailedTransportHandler(ClusterService clusterService) { + this.clusterService = clusterService; + } + @Override public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { handleShardFailureOnMaster(request, new ClusterStateTaskListener() { @@ -156,6 +160,16 @@ public class ShardStateAction extends AbstractComponent { } ); } + + private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry, ClusterStateTaskListener listener) { + logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); + clusterService.submitStateUpdateTask( + "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.HIGH), + shardFailedClusterStateHandler, + listener); + } } class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { @@ -194,43 +208,46 @@ public class ShardStateAction extends AbstractComponent { private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); - private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry, ClusterStateTaskListener listener) { - logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - clusterService.submitStateUpdateTask( - "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.HIGH), - shardFailedClusterStateHandler, - listener); - } - - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason) { - DiscoveryNode masterNode = clusterService.state().nodes().masterNode(); + public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { + DiscoveryNode masterNode = clusterState.nodes().masterNode(); if (masterNode == null) { - logger.warn("{} can't send shard started for {}, no master known.", shardRouting.shardId(), shardRouting); + logger.warn("no master known to start shard [{}]", shardRouting); return; } - shardStarted(shardRouting, indexUUID, reason, masterNode); - } - - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason, final DiscoveryNode masterNode) { ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); - logger.debug("{} sending shard started for {}", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); + 
logger.debug("sending start shard [{}]", shardRoutingEntry); transportService.sendRequest(masterNode, SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override public void handleException(TransportException exp) { - logger.warn("failed to send shard started to [{}]", exp, masterNode); + logger.warn("failure sending start shard [{}] to [{}]", exp, masterNode, shardRouting); } }); } class ShardStartedTransportHandler implements TransportRequestHandler { + private final ClusterService clusterService; + + public ShardStartedTransportHandler(ClusterService clusterService) { + this.clusterService = clusterService; + } + @Override public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { handleShardStartedOnMaster(request); channel.sendResponse(TransportResponse.Empty.INSTANCE); } + + private void handleShardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { + logger.debug("received shard started for {}", shardRoutingEntry); + + clusterService.submitStateUpdateTask( + "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.URGENT), + shardStartedClusterStateHandler, + shardStartedClusterStateHandler); + } } class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { @@ -264,17 +281,6 @@ public class ShardStateAction extends AbstractComponent { private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = new ShardStartedClusterStateHandler(); - private void handleShardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.debug("received shard started for {}", shardRoutingEntry); - - clusterService.submitStateUpdateTask( - "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.URGENT), - shardStartedClusterStateHandler, - shardStartedClusterStateHandler); - } - public static class ShardRoutingEntry extends TransportRequest { ShardRouting shardRouting; String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 099b7f862cd..8a213898b6c 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -459,7 +459,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { try { if (indexShard.recoverFromStore(nodes.localNode())) { - shardStateAction.shardStarted(shardRouting, indexMetaData.getIndexUUID(), "after recovery from store"); + shardStateAction.shardStarted(state, shardRouting, indexMetaData.getIndexUUID(), "after recovery from store"); } } catch (Throwable t) { handleRecoveryFailure(indexService, shardRouting, true, t); @@ -666,7 +665,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent Date: Mon, 4 Jan 2016 11:55:24 +0900 Subject: [PATCH 298/322] add double quotation --- docs/reference/aggregations/misc.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/misc.asciidoc b/docs/reference/aggregations/misc.asciidoc index 074a6eaaca3..1be8373c1ee 100644 
--- a/docs/reference/aggregations/misc.asciidoc
+++ b/docs/reference/aggregations/misc.asciidoc
@@ -44,7 +44,7 @@ Consider this example where we want to associate the color blue with our
 `terms`
--------------------------------------------------
{
 ...
- aggs": {
+ "aggs": {
 "titles": {
 "terms": {
 "field": "title"

From 01a7075ea847a7476c16697538431d5f639658e7 Mon Sep 17 00:00:00 2001
From: Devi
Date: Mon, 4 Jan 2016 10:19:46 +0530
Subject: [PATCH 299/322] fix syntax in the request params

---
 docs/reference/search/search-template.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc
index 77670acafb1..76262644208 100644
--- a/docs/reference/search/search-template.asciidoc
+++ b/docs/reference/search/search-template.asciidoc
@@ -365,7 +365,7 @@ Pre-registered templates can also be rendered using
 GET /_render/template/
 {
 "params": {
- "...
+ "..."
 }
 }
------------------------------------------

From c934f859c71176a3e73b173c47a64742ab7b3b43 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Mon, 4 Jan 2016 11:13:17 +0100
Subject: [PATCH 300/322] Make `missing` on terms aggs work with all execution modes.

There are two bugs:
- the 'global_ordinals_low_cardinality' mode requires a fielddata-based impl so that it can extract the segment to global ordinal mapping
- the 'global_ordinals_hash' mode abusively casts the values source to a fielddata-based impl even though it is not needed

Closes #14882
---
 .../GlobalOrdinalsStringTermsAggregator.java | 13 ++++-----
 .../bucket/terms/TermsAggregatorFactory.java | 7 +++--
 .../search/aggregations/MissingValueIT.java | 29 ++++++++++++-------
 3 files changed, 29 insertions(+), 20 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 6598f6dbdbb..1e7a0047ea5 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -267,9 +267,9 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 private final LongHash bucketOrds;

- public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
+ public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
 Terms.Order order, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext,
- Aggregator parent, SubAggCollectionMode collectionMode,
+ Aggregator parent, SubAggCollectionMode collectionMode,
 boolean showTermDocCountError, List pipelineAggregators, Map metaData) throws IOException {
 super(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode,
@@ -341,7 +341,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 private RandomAccessOrds segmentOrds;

 public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
- Terms.Order order,
+ Terms.Order order,
 BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode,
boolean showTermDocCountError, List pipelineAggregators, Map metaData) throws IOException { @@ -411,11 +411,10 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr // This is the cleanest way I can think of so far GlobalOrdinalMapping mapping; - if (globalOrds instanceof GlobalOrdinalMapping) { - mapping = (GlobalOrdinalMapping) globalOrds; - } else { - assert globalOrds.getValueCount() == segmentOrds.getValueCount(); + if (globalOrds.getValueCount() == segmentOrds.getValueCount()) { mapping = null; + } else { + mapping = (GlobalOrdinalMapping) globalOrds; } for (long i = 1; i < segmentDocCounts.size(); i++) { // We use set(...) here, because we need to reset the slow to 0. diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index a9d6a9334bb..270dc009af2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -94,7 +94,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - if (includeExclude != null || factories.count() > 0) { + if (includeExclude != null || factories.count() > 0 + // we need the FieldData impl to be able to extract the + // segment to global ord mapping + || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) { return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java index f2a78295664..63008bc501f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; @@ -68,18 +69,24 @@ public class MissingValueIT extends ESIntegTestCase { } public void testStringTerms() { - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("bar")).get(); - assertSearchResponse(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - assertEquals(1, terms.getBucketByKey("foo").getDocCount()); - assertEquals(1, terms.getBucketByKey("bar").getDocCount()); + for (ExecutionMode mode : ExecutionMode.values()) { + SearchResponse response = client().prepareSearch("idx").addAggregation( + terms("my_terms") + .field("str") + .executionHint(mode.toString()) + .missing("bar")).get(); + assertSearchResponse(response); + 
Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(1, terms.getBucketByKey("foo").getDocCount()); + assertEquals(1, terms.getBucketByKey("bar").getDocCount()); - response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get(); - assertSearchResponse(response); - terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get(); + assertSearchResponse(response); + terms = response.getAggregations().get("my_terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + } } public void testLongTerms() { From 1a131cf917e99567850baab4a8ad3a400aff2839 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 10:44:41 -0500 Subject: [PATCH 301/322] Inline ShardStateAction#innerShardFailed method --- .../cluster/action/shard/ShardStateAction.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 6575d0f0279..f7318761915 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -83,16 +83,12 @@ public class ShardStateAction extends AbstractComponent { shardFailed(clusterState, shardRouting, indexUUID, message, failure, null, listener); } - public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) { - innerShardFailed(clusterState, shardRouting, indexUUID, message, failure, timeout, listener); - } - public void resendShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { logger.trace("re-sending failed shard [{}], index UUID [{}], reason [{}]", failure, shardRouting, indexUUID, message); shardFailed(clusterState, shardRouting, indexUUID, message, failure, listener); } - private void innerShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, final Throwable failure, TimeValue timeout, Listener listener) { + public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) { DiscoveryNode masterNode = clusterState.nodes().masterNode(); if (masterNode == null) { logger.warn("no master known to fail shard [{}]", shardRouting); From 754bd66b63980efd838358029bd375afbae5498d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 10:46:45 -0500 Subject: [PATCH 302/322] Always log shard ID in ShardStateAction This commit restores logging the ShardRouting#shardId at the front of the log messages in ShardStateAction. The reason for this is so that shard-level log messages have the format "[component][node][shard] message". 
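As an illustration of the convention being restored: the logger call below is lifted from the diff that follows, while the rendered output line is hypothetical (invented node and index names). Passing ShardRouting#shardId as the first logging parameter makes the shard identifier lead the message.

// Shard ID first, so the rendered message reads "[component][node][shard] message".
logger.warn("{} no master known to fail shard [{}]", shardRouting.shardId(), shardRouting);

// With a node named "node_1" and shard 0 of a hypothetical index "logs",
// this renders roughly as:
// [WARN ][cluster.action.shard] [node_1] [logs][0] no master known to fail shard [...]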
--- .../action/shard/ShardStateAction.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index f7318761915..84503cbcf17 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -84,14 +84,14 @@ public class ShardStateAction extends AbstractComponent { } public void resendShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - logger.trace("re-sending failed shard [{}], index UUID [{}], reason [{}]", failure, shardRouting, indexUUID, message); + logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", shardRouting.shardId(), failure, shardRouting, indexUUID, message); shardFailed(clusterState, shardRouting, indexUUID, message, failure, listener); } public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) { DiscoveryNode masterNode = clusterState.nodes().masterNode(); if (masterNode == null) { - logger.warn("no master known to fail shard [{}]", shardRouting); + logger.warn("{} no master known to fail shard [{}]", shardRouting.shardId(), shardRouting); listener.onShardFailedNoMaster(); return; } @@ -109,7 +109,7 @@ public class ShardStateAction extends AbstractComponent { @Override public void handleException(TransportException exp) { - logger.warn("unexpected failure while sending request to [{}] to fail shard [{}]", exp, masterNode, shardRoutingEntry); + logger.warn("{} unexpected failure while sending request to [{}] to fail shard [{}]", exp, shardRoutingEntry.shardRouting.shardId(), masterNode, shardRoutingEntry); listener.onShardFailedFailure(masterNode, exp); } }); @@ -127,21 +127,21 @@ public class ShardStateAction extends AbstractComponent { handleShardFailureOnMaster(request, new ClusterStateTaskListener() { @Override public void onFailure(String source, Throwable t) { - logger.error("unexpected failure while failing shard [{}]", t, request.shardRouting); + logger.error("{} unexpected failure while failing shard [{}]", t, request.shardRouting.shardId(), request.shardRouting); try { channel.sendResponse(t); } catch (Throwable channelThrowable) { - logger.warn("failed to send failure [{}] while failing shard [{}]", channelThrowable, t, request.shardRouting); + logger.warn("{} failed to send failure [{}] while failing shard [{}]", channelThrowable, t, request.shardRouting.shardId(), request.shardRouting); } } @Override public void onNoLongerMaster(String source) { - logger.error("no longer master while failing shard [{}]", request.shardRouting); + logger.error("{} no longer master while failing shard [{}]", request.shardRouting.shardId(), request.shardRouting); try { channel.sendResponse(new NotMasterException(source)); } catch (Throwable channelThrowable) { - logger.warn("failed to send no longer master while failing shard [{}]", channelThrowable, request.shardRouting); + logger.warn("{} failed to send no longer master while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), request.shardRouting); } } @@ -150,7 +150,7 @@ public class ShardStateAction 
extends AbstractComponent { try { channel.sendResponse(TransportResponse.Empty.INSTANCE); } catch (Throwable channelThrowable) { - logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting); + logger.warn("{} failed to send response while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), request.shardRouting); } } } @@ -158,7 +158,7 @@ public class ShardStateAction extends AbstractComponent { } private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry, ClusterStateTaskListener listener) { - logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); + logger.warn("{} received shard failed for [{}]", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); clusterService.submitStateUpdateTask( "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", shardRoutingEntry, @@ -207,7 +207,7 @@ public class ShardStateAction extends AbstractComponent { public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { DiscoveryNode masterNode = clusterState.nodes().masterNode(); if (masterNode == null) { - logger.warn("no master known to start shard [{}]", shardRouting); + logger.warn("{} no master known to start shard [{}]", shardRouting.shardId(), shardRouting); return; } ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); @@ -216,7 +216,7 @@ public class ShardStateAction extends AbstractComponent { SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override public void handleException(TransportException exp) { - logger.warn("failure sending start shard [{}] to [{}]", exp, masterNode, shardRouting); + logger.warn("{} failure sending start shard [{}] to [{}]", exp, shardRouting.shardId(), masterNode, shardRouting); } }); } @@ -235,7 +235,7 @@ public class ShardStateAction extends AbstractComponent { } private void handleShardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.debug("received shard started for {}", shardRoutingEntry); + logger.debug("{} received shard started for [{}]", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); clusterService.submitStateUpdateTask( "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", From fdb0c909ecdef6c86314b6fe69dedfea61bfe3c1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 11:07:42 -0500 Subject: [PATCH 303/322] Inline Shard(.*)TransportHandler#handleShard\1OnMaster methods --- .../action/shard/ShardStateAction.java | 31 +++++++------------ 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 84503cbcf17..7a2c3a444fc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -124,7 +124,13 @@ public class ShardStateAction extends AbstractComponent { @Override public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - handleShardFailureOnMaster(request, new 
ClusterStateTaskListener() { + logger.warn("{} received shard failed for {}", request.failure, request.shardRouting.shardId(), request); + clusterService.submitStateUpdateTask( + "shard-failed (" + request.shardRouting + "), message [" + request.message + "]", + request, + ClusterStateTaskConfig.build(Priority.HIGH), + shardFailedClusterStateHandler, + new ClusterStateTaskListener() { @Override public void onFailure(String source, Throwable t) { logger.error("{} unexpected failure while failing shard [{}]", t, request.shardRouting.shardId(), request.shardRouting); @@ -156,16 +162,6 @@ public class ShardStateAction extends AbstractComponent { } ); } - - private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry, ClusterStateTaskListener listener) { - logger.warn("{} received shard failed for [{}]", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - clusterService.submitStateUpdateTask( - "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.HIGH), - shardFailedClusterStateHandler, - listener); - } } class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { @@ -230,19 +226,14 @@ public class ShardStateAction extends AbstractComponent { @Override public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - handleShardStartedOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - - private void handleShardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.debug("{} received shard started for [{}]", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - + logger.debug("{} received shard started for [{}]", request.shardRouting.shardId(), request); clusterService.submitStateUpdateTask( - "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", - shardRoutingEntry, + "shard-started (" + request.shardRouting + "), reason [" + request.message + "]", + request, ClusterStateTaskConfig.build(Priority.URGENT), shardStartedClusterStateHandler, shardStartedClusterStateHandler); + channel.sendResponse(TransportResponse.Empty.INSTANCE); } } From 7591f2047a1f46f0ccce44ec0a1e1cbdd6598b2d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 11:17:18 -0500 Subject: [PATCH 304/322] Modify nested classes in ShardStateAction to be static --- .../action/shard/ShardStateAction.java | 52 +++++++++++++------ 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 7a2c3a444fc..7a74248e087 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -58,25 +59,20 @@ import java.util.Locale; import static 
org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; - public class ShardStateAction extends AbstractComponent { public static final String SHARD_STARTED_ACTION_NAME = "internal:cluster/shard/started"; public static final String SHARD_FAILED_ACTION_NAME = "internal:cluster/shard/failure"; private final TransportService transportService; - private final AllocationService allocationService; - private final RoutingService routingService; @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { super(settings); this.transportService = transportService; - this.allocationService = allocationService; - this.routingService = routingService; - transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService)); - transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService)); + transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateHandler(allocationService, logger), logger)); + transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateHandler(allocationService, routingService, logger), logger)); } public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { @@ -115,11 +111,15 @@ public class ShardStateAction extends AbstractComponent { }); } - private class ShardFailedTransportHandler implements TransportRequestHandler { + private static class ShardFailedTransportHandler implements TransportRequestHandler { private final ClusterService clusterService; + private final ShardFailedClusterStateHandler shardFailedClusterStateHandler; + private final ESLogger logger; - public ShardFailedTransportHandler(ClusterService clusterService) { + public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateHandler shardFailedClusterStateHandler, ESLogger logger) { this.clusterService = clusterService; + this.shardFailedClusterStateHandler = shardFailedClusterStateHandler; + this.logger = logger; } @Override @@ -164,7 +164,17 @@ public class ShardStateAction extends AbstractComponent { } } - class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { + private static class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { + private final AllocationService allocationService; + private final RoutingService routingService; + private final ESLogger logger; + + public ShardFailedClusterStateHandler(AllocationService allocationService, RoutingService routingService, ESLogger logger) { + this.allocationService = allocationService; + this.routingService = routingService; + this.logger = logger; + } + @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder batchResultBuilder = BatchResult.builder(); @@ -198,8 +208,6 @@ public class ShardStateAction extends AbstractComponent { } } - private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new 
ShardFailedClusterStateHandler(); - public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { DiscoveryNode masterNode = clusterState.nodes().masterNode(); if (masterNode == null) { @@ -217,11 +225,15 @@ public class ShardStateAction extends AbstractComponent { }); } - class ShardStartedTransportHandler implements TransportRequestHandler { + private static class ShardStartedTransportHandler implements TransportRequestHandler { private final ClusterService clusterService; + private final ShardStartedClusterStateHandler shardStartedClusterStateHandler; + private final ESLogger logger; - public ShardStartedTransportHandler(ClusterService clusterService) { + public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateHandler shardStartedClusterStateHandler, ESLogger logger) { this.clusterService = clusterService; + this.shardStartedClusterStateHandler = shardStartedClusterStateHandler; + this.logger = logger; } @Override @@ -237,7 +249,15 @@ public class ShardStateAction extends AbstractComponent { } } - class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + private static class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + private final AllocationService allocationService; + private final ESLogger logger; + + public ShardStartedClusterStateHandler(AllocationService allocationService, ESLogger logger) { + this.allocationService = allocationService; + this.logger = logger; + } + @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder builder = BatchResult.builder(); @@ -266,8 +286,6 @@ public class ShardStateAction extends AbstractComponent { } } - private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = new ShardStartedClusterStateHandler(); - public static class ShardRoutingEntry extends TransportRequest { ShardRouting shardRouting; String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; From af523c42362513fbc4573e88e2f011d8a9878293 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 11:29:00 -0500 Subject: [PATCH 305/322] Fix order of logging parameters in ShardStateAction --- .../elasticsearch/cluster/action/shard/ShardStateAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 7a74248e087..95d6d09451f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -137,7 +137,7 @@ public class ShardStateAction extends AbstractComponent { try { channel.sendResponse(t); } catch (Throwable channelThrowable) { - logger.warn("{} failed to send failure [{}] while failing shard [{}]", channelThrowable, t, request.shardRouting.shardId(), request.shardRouting); + logger.warn("{} failed to send failure [{}] while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), t, request.shardRouting); } } From ff1aed80cc64a1e4d63d50511b7ccea6619d382d Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Mon, 4 Jan 2016 11:11:02 -0600 Subject: [PATCH 306/322] Revert GeoPointField.stored default back to false As a default in V2, the GeoPointField.stored option was set to true. 
Since this consumes disk space with no positive benefit the default stored option is being reverted back to false. --- .../elasticsearch/index/mapper/geo/GeoPointFieldMapper.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 450c987ae54..71309d2fa2d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -63,7 +63,6 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { FIELD_TYPE.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.setStored(true); FIELD_TYPE.freeze(); } } @@ -127,4 +126,4 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { } super.parse(context, point, geoHash); } -} \ No newline at end of file +} From 974aa04cc0457f01c4facf76bb665516a02c5a32 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 4 Jan 2016 14:48:56 -0500 Subject: [PATCH 307/322] [docs] suggest_mode is per shard --- .../search/suggesters/phrase-suggest.asciidoc | 82 ++++++++++--------- 1 file changed, 43 insertions(+), 39 deletions(-) diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index bc2f016d288..6a13e2bcd05 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -97,20 +97,20 @@ can contain misspellings (See parameter descriptions below). language model, the suggester will use this field to gain statistics to score corrections. This field is mandatory. -`gram_size`:: +`gram_size`:: sets max size of the n-grams (shingles) in the `field`. If the field doesn't contain n-grams (shingles) this should be omitted or set to `1`. Note that Elasticsearch tries to detect the gram size based on the specified `field`. If the field uses a `shingle` filter the `gram_size` is set to the `max_shingle_size` if not explicitly set. -`real_word_error_likelihood`:: +`real_word_error_likelihood`:: the likelihood of a term being a misspelled even if the term exists in the dictionary. The default is `0.95` corresponding to 5% of the real words are misspelled. -`confidence`:: +`confidence`:: The confidence level defines a factor applied to the input phrases score which is used as a threshold for other suggest candidates. Only candidates that score higher than the threshold will be @@ -118,7 +118,7 @@ can contain misspellings (See parameter descriptions below). only return suggestions that score higher than the input phrase. If set to `0.0` the top N candidates are returned. The default is `1.0`. -`max_errors`:: +`max_errors`:: the maximum percentage of the terms that at most considered to be misspellings in order to form a correction. This method accepts a float value in the range `[0..1)` as a fraction of the actual @@ -126,39 +126,39 @@ can contain misspellings (See parameter descriptions below). default is set to `1.0` which corresponds to that only corrections with at most 1 misspelled term are returned. Note that setting this too high can negatively impact performance. 
Low values like `1` or `2` are recommended - otherwise the time spend in suggest calls might exceed the time spend in + otherwise the time spend in suggest calls might exceed the time spend in query execution. -`separator`:: +`separator`:: the separator that is used to separate terms in the bigram field. If not set the whitespace character is used as a separator. -`size`:: +`size`:: the number of candidates that are generated for each individual query term Low numbers like `3` or `5` typically produce good results. Raising this can bring up terms with higher edit distances. The default is `5`. -`analyzer`:: +`analyzer`:: Sets the analyzer to analyse to suggest text with. Defaults to the search analyzer of the suggest field passed via `field`. -`shard_size`:: +`shard_size`:: Sets the maximum number of suggested term to be retrieved from each individual shard. During the reduce phase, only the top N suggestions are returned based on the `size` option. Defaults to `5`. -`text`:: +`text`:: Sets the text / query to provide suggestions for. `highlight`:: - Sets up suggestion highlighting. If not provided then - no `highlighted` field is returned. If provided must - contain exactly `pre_tag` and `post_tag` which are - wrapped around the changed tokens. If multiple tokens - in a row are changed the entire phrase of changed tokens + Sets up suggestion highlighting. If not provided then + no `highlighted` field is returned. If provided must + contain exactly `pre_tag` and `post_tag` which are + wrapped around the changed tokens. If multiple tokens + in a row are changed the entire phrase of changed tokens is wrapped rather than each token. `collate`:: @@ -217,21 +217,21 @@ curl -XPOST 'localhost:9200/_search' -d { The `phrase` suggester supports multiple smoothing models to balance weight between infrequent grams (grams (shingles) are not existing in -the index) and frequent grams (appear at least once in the index). +the index) and frequent grams (appear at least once in the index). [horizontal] -`stupid_backoff`:: +`stupid_backoff`:: a simple backoff model that backs off to lower order n-gram models if the higher order count is `0` and discounts the lower order n-gram model by a constant factor. The default `discount` is - `0.4`. Stupid Backoff is the default model. + `0.4`. Stupid Backoff is the default model. `laplace`:: a smoothing model that uses an additive smoothing where a constant (typically `1.0` or smaller) is added to all counts to balance - weights, The default `alpha` is `0.5`. + weights, The default `alpha` is `0.5`. -`linear_interpolation`:: +`linear_interpolation`:: a smoothing model that takes the weighted mean of the unigrams, bigrams and trigrams based on user supplied weights (lambdas). Linear Interpolation doesn't have any default values. @@ -244,7 +244,7 @@ The `phrase` suggester uses candidate generators to produce a list of possible terms per term in the given text. A single candidate generator is similar to a `term` suggester called for each individual term in the text. The output of the generators is subsequently scored in combination -with the candidates from the other terms to for suggestion candidates. +with the candidates from the other terms to for suggestion candidates. Currently only one type of candidate generator is supported, the `direct_generator`. The Phrase suggest API accepts a list of generators @@ -256,26 +256,30 @@ called per term in the original text. 
The direct generators support the following parameters:

[horizontal]
-`field`::
+`field`::
 The field to fetch the candidate suggestions from. This is a required option that either needs to be set globally or per suggestion.
-`size`::
+`size`::
 The maximum corrections to be returned per suggest text token.
`suggest_mode`::
- The suggest mode controls what suggestions are
- included or controls for what suggest text terms, suggestions should be
- suggested. Three possible values can be specified:
- ** `missing`: Only suggest terms in the suggest text that aren't in the
- index. This is the default.
- ** `popular`: Only suggest suggestions that occur in more docs then the
- original suggest text term.
+ The suggest mode controls what suggestions are included in the suggestions
+ generated on each shard. All values other than `always` can be thought of
+ as an optimization to generate fewer suggestions to test on each shard and
+ are not rechecked when combining the suggestions generated on each
+ shard. Thus `missing` will generate suggestions for terms on shards that do
+ not contain them even if other shards do contain them. Those should be
+ filtered out using `confidence`. Three possible values can be specified:
+ ** `missing`: Only generate suggestions for terms that are not in the
+ shard. This is the default.
+ ** `popular`: Only suggest terms that occur in more docs on the shard than
+ the original term.
 ** `always`: Suggest any matching suggestions based on terms in the suggest text.
-`max_edits`::
+`max_edits`::
 The maximum edit distance candidate suggestions can have in order to be considered as a suggestion. Can only be a value between 1 and 2. Any other value result in an bad request error being thrown.
@@ -287,11 +291,11 @@ The direct generators support the following parameters:
 this number improves spellcheck performance. Usually misspellings don't occur in the beginning of terms. (Old name "prefix_len" is deprecated)
-`min_word_length`::
+`min_word_length`::
 The minimum length a suggest text term must have in order to be included. Defaults to 4. (Old name "min_word_len" is deprecated)
-`max_inspections`::
+`max_inspections`::
 A factor that is used to multiply with the `shards_size` in order to inspect more candidate spell corrections on the shard level. Can improve accuracy at the cost of performance.
@@ -306,7 +310,7 @@ The direct generators support the following parameters:
 cannot be fractional. The shard level document frequencies are used for this option.
-`max_term_freq`::
+`max_term_freq`::
 The maximum threshold in number of documents a suggest text token can exist in order to be included. Can be a relative percentage number (e.g 0.4) or an absolute number to represent document
@@ -322,16 +326,16 @@ The direct generators support the following parameters:
 tokens passed to this candidate generator. This filter is applied to the original token before candidates are generated.
-`post_filter`::
+`post_filter`::
 a filter (analyzer) that is applied to each of the generated tokens before they are passed to the actual phrase scorer.

The following example shows a `phrase` suggest call with two generators, the first one is using a field containing ordinary indexed terms and the
-second one uses a field that uses terms indexed with a `reverse` filter
-(tokens are index in reverse order). This is used to overcome the limitation
-of the direct generators to require a constant prefix to provide
-high-performance suggestions.
The `pre_filter` and `post_filter` options +second one uses a field that uses terms indexed with a `reverse` filter +(tokens are index in reverse order). This is used to overcome the limitation +of the direct generators to require a constant prefix to provide +high-performance suggestions. The `pre_filter` and `post_filter` options accept ordinary analyzer names. [source,js] From 8a793b65643c1d9ff43b8d61714f23009dacd449 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 4 Jan 2016 16:13:39 -0500 Subject: [PATCH 308/322] Rename Shard(.*)ClusterStateHandler to Shard$1ClusterStateTaskExecutor --- .../action/shard/ShardStateAction.java | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 95d6d09451f..58b766e8d84 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -71,8 +71,8 @@ public class ShardStateAction extends AbstractComponent { super(settings); this.transportService = transportService; - transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateHandler(allocationService, logger), logger)); - transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateHandler(allocationService, routingService, logger), logger)); + transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger)); + transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger)); } public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { @@ -113,12 +113,12 @@ public class ShardStateAction extends AbstractComponent { private static class ShardFailedTransportHandler implements TransportRequestHandler { private final ClusterService clusterService; - private final ShardFailedClusterStateHandler shardFailedClusterStateHandler; + private final ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor; private final ESLogger logger; - public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateHandler shardFailedClusterStateHandler, ESLogger logger) { + public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, ESLogger logger) { this.clusterService = clusterService; - this.shardFailedClusterStateHandler = shardFailedClusterStateHandler; + this.shardFailedClusterStateTaskExecutor = shardFailedClusterStateTaskExecutor; this.logger = logger; } @@ -129,7 +129,7 @@ public class ShardStateAction extends AbstractComponent { "shard-failed (" + request.shardRouting + "), message [" + request.message + "]", request, 
ClusterStateTaskConfig.build(Priority.HIGH), - shardFailedClusterStateHandler, + shardFailedClusterStateTaskExecutor, new ClusterStateTaskListener() { @Override public void onFailure(String source, Throwable t) { @@ -164,12 +164,12 @@ public class ShardStateAction extends AbstractComponent { } } - private static class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor { + private static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor { private final AllocationService allocationService; private final RoutingService routingService; private final ESLogger logger; - public ShardFailedClusterStateHandler(AllocationService allocationService, RoutingService routingService, ESLogger logger) { + public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) { this.allocationService = allocationService; this.routingService = routingService; this.logger = logger; @@ -227,12 +227,12 @@ public class ShardStateAction extends AbstractComponent { private static class ShardStartedTransportHandler implements TransportRequestHandler { private final ClusterService clusterService; - private final ShardStartedClusterStateHandler shardStartedClusterStateHandler; + private final ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor; private final ESLogger logger; - public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateHandler shardStartedClusterStateHandler, ESLogger logger) { + public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, ESLogger logger) { this.clusterService = clusterService; - this.shardStartedClusterStateHandler = shardStartedClusterStateHandler; + this.shardStartedClusterStateTaskExecutor = shardStartedClusterStateTaskExecutor; this.logger = logger; } @@ -243,17 +243,17 @@ public class ShardStateAction extends AbstractComponent { "shard-started (" + request.shardRouting + "), reason [" + request.message + "]", request, ClusterStateTaskConfig.build(Priority.URGENT), - shardStartedClusterStateHandler, - shardStartedClusterStateHandler); + shardStartedClusterStateTaskExecutor, + shardStartedClusterStateTaskExecutor); channel.sendResponse(TransportResponse.Empty.INSTANCE); } } - private static class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + private static class ShardStartedClusterStateTaskExecutor implements ClusterStateTaskExecutor, ClusterStateTaskListener { private final AllocationService allocationService; private final ESLogger logger; - public ShardStartedClusterStateHandler(AllocationService allocationService, ESLogger logger) { + public ShardStartedClusterStateTaskExecutor(AllocationService allocationService, ESLogger logger) { this.allocationService = allocationService; this.logger = logger; } From f6020a6ff29824757855348157933e5cd90b49ba Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 4 Jan 2016 23:40:20 +0100 Subject: [PATCH 309/322] Close recovered translog readers if createWriter fails If we fail to create a writer all recovered translog readers are not closed today which causes all open files to leak. 
Closes #15754 --- .../index/translog/Translog.java | 15 ++++++++++-- .../index/translog/TranslogReader.java | 2 +- .../index/translog/TranslogTests.java | 23 +++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index c2cb3c19af8..3f8f0ab54df 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -167,8 +167,19 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (recoveredTranslogs.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); } - current = createWriter(checkpoint.generation + 1); - this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; + boolean success = false; + try { + current = createWriter(checkpoint.generation + 1); + this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; + success = true; + } finally { + // we have to close all the recovered ones otherwise we leak file handles here + // for instance if we have a lot of tlog and we can't create the writer we keep on holding + // on to all the uncommitted tlog files if we don't close + if (success == false) { + IOUtils.closeWhileHandlingException(recoveredTranslogs); + } + } } else { this.recoveredTranslogs = Collections.emptyList(); IOUtils.rm(location); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index d7077fd90ad..71dff6ec36e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -138,7 +138,7 @@ public abstract class TranslogReader implements Closeable, Comparable Date: Tue, 5 Jan 2016 10:26:39 +0100 Subject: [PATCH 310/322] Fix GeoPointFieldMapperTests expectations. --- .../index/mapper/geo/GeoPointFieldMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 299646b3c7f..758e5a38294 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -69,7 +69,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { boolean indexCreatedBefore22 = version.before(Version.V_2_2_0); assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - final boolean stored = indexCreatedBefore22 == false; + final boolean stored = false; assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored)); assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(stored)); From 3a41dfe1dd4196f821d5808711c8c4a1dbec1308 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 10:30:26 +0100 Subject: [PATCH 311/322] Add serialization support for more important IOExceptions Several IOExceptions are always wrapped in an NotSerializableWrapper which is annoying to read. 
These exceptions are important to get right across the network, and we should support those that indicate problems on the filesystem. This commit also adds general support for IOException to preserve the parent type across the network if no specific type is serializable.
---
 .../common/io/stream/StreamInput.java | 41 ++++++++++++++-
 .../common/io/stream/StreamOutput.java | 35 +++++++++++--
 .../ExceptionSerializationTests.java | 51 ++++++++++++++++---
 3 files changed, 116 insertions(+), 11 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index ffcb4201f4d..7a818f49d89 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -46,7 +46,14 @@ import java.io.FileNotFoundException;
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.file.AccessDeniedException;
+import java.nio.file.AtomicMoveNotSupportedException;
+import java.nio.file.DirectoryNotEmptyException;
+import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.FileSystemException;
+import java.nio.file.FileSystemLoopException;
 import java.nio.file.NoSuchFileException;
+import java.nio.file.NotDirectoryException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
@@ -592,11 +599,41 @@ public abstract class StreamInput extends InputStream {
 case 13:
 return (T) readStackTrace(new FileNotFoundException(readOptionalString()), this);
 case 14:
+ final int subclass = readVInt();
 final String file = readOptionalString();
 final String other = readOptionalString();
 final String reason = readOptionalString();
 readOptionalString(); // skip the msg - it's composed from file, other and reason
- return (T) readStackTrace(new NoSuchFileException(file, other, reason), this);
+ final Throwable throwable;
+ switch (subclass) {
+ case 0:
+ throwable = new NoSuchFileException(file, other, reason);
+ break;
+ case 1:
+ throwable = new NotDirectoryException(file);
+ break;
+ case 2:
+ throwable = new DirectoryNotEmptyException(file);
+ break;
+ case 3:
+ throwable = new AtomicMoveNotSupportedException(file, other, reason);
+ break;
+ case 4:
+ throwable = new FileAlreadyExistsException(file, other, reason);
+ break;
+ case 5:
+ throwable = new AccessDeniedException(file, other, reason);
+ break;
+ case 6:
+ throwable = new FileSystemLoopException(file);
+ break;
+ case 7:
+ throwable = new FileSystemException(file, other, reason);
+ break;
+ default:
+ throw new IllegalStateException("unknown FileSystemException with index " + subclass);
+ }
+ return (T) readStackTrace(throwable, this);
 case 15:
 return (T) readStackTrace(new OutOfMemoryError(readOptionalString()), this);
 case 16:
@@ -605,6 +642,8 @@ public abstract class StreamInput extends InputStream {
 return (T) readStackTrace(new LockObtainFailedException(readOptionalString(), readThrowable()), this);
 case 18:
 return (T) readStackTrace(new InterruptedException(readOptionalString()), this);
+ case 19:
+ return (T) readStackTrace(new IOException(readOptionalString(), readThrowable()), this);
 default:
 assert false : "no such exception for id: " + key;
 }
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index e8997b8073f..ad02a0fd525 100644
---
a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -42,7 +42,15 @@ import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; +import java.nio.channels.ClosedChannelException; +import java.nio.file.AccessDeniedException; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystemLoopException; import java.nio.file.NoSuchFileException; +import java.nio.file.NotDirectoryException; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; @@ -565,11 +573,28 @@ public abstract class StreamOutput extends OutputStream { } else if (throwable instanceof FileNotFoundException) { writeVInt(13); writeCause = false; - } else if (throwable instanceof NoSuchFileException) { + } else if (throwable instanceof FileSystemException) { writeVInt(14); - writeOptionalString(((NoSuchFileException) throwable).getFile()); - writeOptionalString(((NoSuchFileException) throwable).getOtherFile()); - writeOptionalString(((NoSuchFileException) throwable).getReason()); + if (throwable instanceof NoSuchFileException) { + writeVInt(0); + } else if (throwable instanceof NotDirectoryException) { + writeVInt(1); + } else if (throwable instanceof DirectoryNotEmptyException) { + writeVInt(2); + } else if (throwable instanceof AtomicMoveNotSupportedException) { + writeVInt(3); + } else if (throwable instanceof FileAlreadyExistsException) { + writeVInt(4); + } else if (throwable instanceof AccessDeniedException) { + writeVInt(5); + } else if (throwable instanceof FileSystemLoopException) { + writeVInt(6); + } else { + writeVInt(7); + } + writeOptionalString(((FileSystemException) throwable).getFile()); + writeOptionalString(((FileSystemException) throwable).getOtherFile()); + writeOptionalString(((FileSystemException) throwable).getReason()); writeCause = false; } else if (throwable instanceof OutOfMemoryError) { writeVInt(15); @@ -581,6 +606,8 @@ public abstract class StreamOutput extends OutputStream { } else if (throwable instanceof InterruptedException) { writeVInt(18); writeCause = false; + } else if (throwable instanceof IOException) { + writeVInt(19); } else { ElasticsearchException ex; if (throwable instanceof ElasticsearchException && ElasticsearchException.isRegistered(throwable.getClass())) { diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 6650f596755..975de9e8f0e 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -18,8 +18,6 @@ */ package org.elasticsearch; -import com.fasterxml.jackson.core.JsonLocation; -import com.fasterxml.jackson.core.JsonParseException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.TimestampParsingException; @@ -86,11 +84,20 @@ import org.elasticsearch.transport.ConnectTransportException; import java.io.IOException; import java.lang.reflect.Modifier; import java.net.URISyntaxException; +import java.nio.file.AccessDeniedException; +import java.nio.file.AtomicMoveNotSupportedException; +import 
java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystemLoopException; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.NotDirectoryException; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -548,17 +555,17 @@ public class ExceptionSerializationTests extends ESTestCase { assertEquals("{\"type\":\"illegal_argument_exception\",\"reason\":\"nono!\"}", toXContent(ex)); Throwable[] unknowns = new Throwable[]{ - new JsonParseException("foobar", new JsonLocation(new Object(), 1, 2, 3, 4)), + new Exception("foobar"), new ClassCastException("boom boom boom"), - new IOException("booom") + new UnsatisfiedLinkError("booom") }; for (Throwable t : unknowns) { if (randomBoolean()) { - t.addSuppressed(new IOException("suppressed")); + t.addSuppressed(new UnsatisfiedLinkError("suppressed")); t.addSuppressed(new NullPointerException()); } Throwable deserialized = serialize(t); - assertTrue(deserialized instanceof NotSerializableExceptionWrapper); + assertTrue(deserialized.getClass().toString(), deserialized instanceof NotSerializableExceptionWrapper); assertArrayEquals(t.getStackTrace(), deserialized.getStackTrace()); assertEquals(t.getSuppressed().length, deserialized.getSuppressed().length); if (t.getSuppressed().length > 0) { @@ -795,4 +802,36 @@ } } } + + public void testIOException() throws IOException { + IOException serialize = serialize(new IOException("boom", new NullPointerException())); + assertEquals("boom", serialize.getMessage()); + assertTrue(serialize.getCause() instanceof NullPointerException); + } + + + public void testFileSystemExceptions() throws IOException { + for (FileSystemException ex : Arrays.asList(new FileSystemException("a", "b", "c"), + new NoSuchFileException("a", "b", "c"), + new NotDirectoryException("a"), + new DirectoryNotEmptyException("a"), + new AtomicMoveNotSupportedException("a", "b", "c"), + new FileAlreadyExistsException("a", "b", "c"), + new AccessDeniedException("a", "b", "c"), + new FileSystemLoopException("a"))) { + + FileSystemException serialize = serialize(ex); + assertEquals(serialize.getClass(), ex.getClass()); + assertEquals("a", serialize.getFile()); + if (serialize.getClass() == NotDirectoryException.class || + serialize.getClass() == FileSystemLoopException.class || + serialize.getClass() == DirectoryNotEmptyException.class) { + assertNull(serialize.getOtherFile()); + assertNull(serialize.getReason()); + } else { + assertEquals(serialize.getClass().toString(), "b", serialize.getOtherFile()); + assertEquals(serialize.getClass().toString(), "c", serialize.getReason()); + } + } + } } From d48af9a155d87bdfc589fdb8d0ebfd62d72b1564 Mon Sep 17 00:00:00 2001 From: Eugene Pirogov Date: Mon, 4 Jan 2016 16:55:38 +0200 Subject: [PATCH 312/322] Fix indent in example Previously it looked as if the `warnings` key were nested under `errors`.
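A quick round-trip sketch of the FileSystemException wire format added in PATCH 311 above (illustrative only, not part of either patch; it assumes the BytesStreamOutput / StreamInput.wrap helpers that ExceptionSerializationTests builds on):

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import java.nio.file.AccessDeniedException;

// On the wire the exception is encoded as the FileSystemException family
// (id 14) plus a sub-type id (5 for AccessDeniedException), so the concrete
// class, file, other file and reason all survive the round trip.
BytesStreamOutput out = new BytesStreamOutput();
out.writeThrowable(new AccessDeniedException("a", "b", "c"));
StreamInput in = StreamInput.wrap(out.bytes());
AccessDeniedException copy = (AccessDeniedException) in.readThrowable();
assert "a".equals(copy.getFile()) && "b".equals(copy.getOtherFile()) && "c".equals(copy.getReason());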
--- .../bucket/filters-aggregation.asciidoc | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc index 3e81e99eb99..322dccb790f 100644 --- a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc @@ -46,19 +46,18 @@ Response: "buckets" : { "errors" : { "doc_count" : 34, - "monthly" : { - "buckets" : [ - ... // the histogram monthly breakdown - ] - } - }, - "warnings" : { - "doc_count" : 439, - "monthly" : { - "buckets" : [ - ... // the histogram monthly breakdown - ] - } + "monthly" : { + "buckets" : [ + ... // the histogram monthly breakdown + ] + } + }, + "warnings" : { + "doc_count" : 439, + "monthly" : { + "buckets" : [ + ... // the histogram monthly breakdown + ] } } } From f69502dd043d151e2bf6fc91dea93b7a54d3b23b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 12:25:08 +0100 Subject: [PATCH 313/322] Simplify TranslogWriter to always write to a stream We used to write into an in-memory buffer and if necessary also allow reading from the memory buffer if some translog locations that have not yet been flushed to the channel need to be read. This commit hides all writing behind a buffered output stream and if necessary flushes all buffered data to the channel for reading. This allows for several simplifications, like reusing Java's built-in BufferedOutputStream, and removes the need for read/write locks on the translog writer. All thread safety is now achieved using the synchronized primitive. --- .../index/translog/TranslogWriter.java | 201 ++++++++---------- 1 file changed, 83 insertions(+), 118 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 49392088692..d0051714b07 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -42,6 +42,7 @@ import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -53,35 +54,23 @@ public class TranslogWriter extends TranslogReader { public static final int VERSION = VERSION_CHECKPOINTS; private final ShardId shardId; - private final ReleasableLock readLock; - private final ReleasableLock writeLock; /* the offset in bytes that was written when the file was last synced*/ private volatile long lastSyncedOffset; /* the number of translog operations written to this file */ private volatile int operationCounter; - /* the offset in bytes written to the file */ - private volatile long writtenOffset; /* if we hit an exception that we can't recover from we assign it to this var and ship it with every AlreadyClosedException we throw */ private volatile Throwable tragedy; - - private final byte[] buffer; - private int bufferCount; - private WrapperOutputStream bufferOs = new WrapperOutputStream(); - + /* A buffered output stream that writes to the writer's channel */ + private final OutputStream outputStream; /* the total offset of this file including the bytes written to the file as well as into the buffer */ private volatile long totalOffset; - public 
TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, ByteSizeValue bufferSize) throws IOException { super(generation, channelReference, channelReference.getChannel().position()); this.shardId = shardId; - ReadWriteLock rwl = new ReentrantReadWriteLock(); - readLock = new ReleasableLock(rwl.readLock()); - writeLock = new ReleasableLock(rwl.writeLock()); - this.writtenOffset = channelReference.getChannel().position(); - this.totalOffset = writtenOffset; - this.buffer = new byte[bufferSize.bytesAsInt()]; + this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channelReference.getChannel()), bufferSize.bytesAsInt()); this.lastSyncedOffset = channelReference.getChannel().position(); + totalOffset = lastSyncedOffset; } public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { @@ -116,75 +105,59 @@ public class TranslogWriter extends TranslogReader { return tragedy; } - private final void closeWithTragicEvent(Throwable throwable) throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { - if (tragedy == null) { - tragedy = throwable; - } else { - tragedy.addSuppressed(throwable); - } - close(); + private synchronized final void closeWithTragicEvent(Throwable throwable) throws IOException { + if (tragedy == null) { + tragedy = throwable; + } else { + tragedy.addSuppressed(throwable); } + close(); } /** * add the given bytes to the translog and return the location they were written at */ - public Translog.Location add(BytesReference data) throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - final long offset = totalOffset; - if (data.length() >= buffer.length) { - flush(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - try { - data.writeTo(channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += data.length(); - totalOffset += data.length(); - } else { - if (data.length() > buffer.length - bufferCount) { - flush(); - } - data.writeTo(bufferOs); - totalOffset += data.length(); - } - operationCounter++; - return new Translog.Location(generation, offset, data.length()); + public synchronized Translog.Location add(BytesReference data) throws IOException { + ensureOpen(); + final long offset = totalOffset; + try { + data.writeTo(outputStream); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; } + totalOffset += data.length(); + operationCounter++; + return new Translog.Location(generation, offset, data.length()); } /** * write all buffered ops to disk and fsync file */ - public synchronized void sync() throws IOException { + public void sync() throws IOException { if (syncNeeded()) { - ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event - channelReference.incRef(); - try { - final long offsetToSync; - final int opsCounter; - try (ReleasableLock lock = writeLock.acquire()) { - flush(); + synchronized (this) { + ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event + channelReference.incRef(); + try { + final long offsetToSync; + final int opsCounter; + outputStream.flush(); offsetToSync = totalOffset; opsCounter = operationCounter; + // we can do this outside of the write lock but we have to 
protect from + // concurrent syncs + ensureOpen(); // just for kicks - the checkpoint happens or not either way + try { + checkpoint(offsetToSync, opsCounter, channelReference); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + lastSyncedOffset = offsetToSync; + }finally{ + channelReference.decRef(); } - // we can do this outside of the write lock but we have to protect from - // concurrent syncs - ensureOpen(); // just for kicks - the checkpoint happens or not either way - try { - checkpoint(offsetToSync, opsCounter, channelReference); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - lastSyncedOffset = offsetToSync; - } finally { - channelReference.decRef(); } } } @@ -204,28 +177,6 @@ public class TranslogWriter extends TranslogReader { return totalOffset; } - - /** - * Flushes the buffer if the translog is buffered. - */ - private final void flush() throws IOException { - assert writeLock.isHeldByCurrentThread(); - if (bufferCount > 0) { - ensureOpen(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - final int bufferSize = bufferCount; - try { - Channels.writeToChannel(buffer, 0, bufferSize, channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += bufferSize; - bufferCount = 0; - } - } - /** * returns a new reader that follows the current writes (most importantly allows making * repeated snapshots that includes new content) @@ -235,7 +186,7 @@ public class TranslogWriter extends TranslogReader { channelReference.incRef(); boolean success = false; try { - TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference); + final TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference); success = true; return reader; } finally { @@ -250,16 +201,18 @@ public class TranslogWriter extends TranslogReader { */ public ImmutableTranslogReader immutableReader() throws TranslogException { if (channelReference.tryIncRef()) { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - flush(); - ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, writtenOffset, operationCounter); - channelReference.incRef(); // for new reader - return reader; - } catch (Exception e) { - throw new TranslogException(shardId, "exception while creating an immutable reader", e); - } finally { - channelReference.decRef(); + synchronized (this) { + try { + ensureOpen(); + outputStream.flush(); + ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, getWrittenOffset(), operationCounter); + channelReference.incRef(); // for new reader + return reader; + } catch (Exception e) { + throw new TranslogException(shardId, "exception while creating an immutable reader", e); + } finally { + channelReference.decRef(); + } } } else { throw new TranslogException(shardId, "can't increment channel [" + channelReference + "] ref count"); @@ -272,6 +225,10 @@ public class TranslogWriter extends TranslogReader { return new BytesArray(buffer.array()).equals(expectedBytes); } + private long getWrittenOffset() throws IOException { + return channelReference.getChannel().position(); + } + /** * this class is used when one wants a reference to this file which exposes all recently written operation. 
* as such it needs access to the internals of the current reader @@ -313,14 +270,9 @@ public class TranslogWriter extends TranslogReader { @Override protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - try (ReleasableLock lock = readLock.acquire()) { - if (position >= writtenOffset) { - assert targetBuffer.hasArray() : "buffer must have array"; - final int sourcePosition = (int) (position - writtenOffset); - System.arraycopy(buffer, sourcePosition, - targetBuffer.array(), targetBuffer.position(), targetBuffer.limit()); - targetBuffer.position(targetBuffer.limit()); - return; + if (position+targetBuffer.limit() > getWrittenOffset()) { + synchronized (this) { + outputStream.flush(); } } // we don't have to have a read lock here because we only write ahead to the file, so all writes has been complete @@ -355,18 +307,31 @@ } } - class WrapperOutputStream extends OutputStream { - @Override - public void write(int b) throws IOException { - buffer[bufferCount++] = (byte) b; + private final class BufferedChannelOutputStream extends BufferedOutputStream { + + public BufferedChannelOutputStream(OutputStream out, int size) throws IOException { + super(out, size); } @Override - public void write(byte[] b, int off, int len) throws IOException { - // we do safety checked when we decide to use this stream... - System.arraycopy(b, off, buffer, bufferCount, len); - bufferCount += len; + public synchronized void flush() throws IOException { + if (count > 0) { + try { + ensureOpen(); + super.flush(); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + } + } + + @Override + public void close() throws IOException { + // the stream is intentionally not closed because + // closing it will close the FileChannel + throw new IllegalStateException("never close this stream"); + } } } From 2370ace922562c97edf210d3546d040dbda66513 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 15:45:41 +0100 Subject: [PATCH 314/322] assert that tragic event exception is never null --- .../java/org/elasticsearch/index/translog/TranslogWriter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index d0051714b07..81e5d9407a5 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -106,6 +106,7 @@ public class TranslogWriter extends TranslogReader { } private synchronized final void closeWithTragicEvent(Throwable throwable) throws IOException { + assert throwable != null : "throwable must not be null in a tragic event"; if (tragedy == null) { tragedy = throwable; } else { From 469a179ab11176e08f1db9c6b56e723fd4d0f583 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 15:48:35 +0100 Subject: [PATCH 315/322] remove stale comments --- .../java/org/elasticsearch/index/translog/TranslogWriter.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 81e5d9407a5..1434fb1cb1e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -146,9 +146,6 @@ public class TranslogWriter extends 
TranslogReader { outputStream.flush(); offsetToSync = totalOffset; opsCounter = operationCounter; - // we can do this outside of the write lock but we have to protect from - // concurrent syncs - ensureOpen(); // just for kicks - the checkpoint happens or not either way try { checkpoint(offsetToSync, opsCounter, channelReference); } catch (Throwable ex) { From 5272c98136023caa93fcef75bb207ea90e64dbff Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 15:50:10 +0100 Subject: [PATCH 316/322] format finally block --- .../java/org/elasticsearch/index/translog/TranslogWriter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 1434fb1cb1e..094f5570777 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -153,7 +153,7 @@ public class TranslogWriter extends TranslogReader { throw ex; } lastSyncedOffset = offsetToSync; - }finally{ + } finally { channelReference.decRef(); } } From 419f3976c2c29f02b82fd4c376527cb562a5c7be Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 5 Jan 2016 09:41:58 -0500 Subject: [PATCH 317/322] Fix NPE in TestClusterService when waiting indefinitely When waiting indefinitely for a new cluster state in a test, TestClusterService#add will throw a NullPointerException if the timeout is null. Instead, TestClusterService#add should guard against a null timeout and not even attempt to add a notification for the timeout expiring. Note that the usage of null is the agreed upon contract for specifying an indefinite wait from ClusterStateObserver. 
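A caller-side sketch of that contract (illustrative only; `clusterService` is an assumed ClusterService instance, and the listener body is hypothetical):

import java.util.concurrent.CountDownLatch;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.common.unit.TimeValue;

final CountDownLatch latch = new CountDownLatch(1);
// A null timeout means "wait indefinitely", so no NotifyTimeout may be scheduled.
clusterService.add((TimeValue) null, new TimeoutClusterStateListener() {
    @Override public void postAdded() {}
    @Override public void onClose() {}
    @Override public void onTimeout(TimeValue timeout) {} // never fires for a null timeout
    @Override public void clusterChanged(ClusterChangedEvent event) {
        latch.countDown(); // observed the new cluster state
    }
});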
--- .../elasticsearch/test/cluster/TestClusterService.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index 6e17eae1be4..93327213bbc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -184,9 +184,11 @@ public class TestClusterService implements ClusterService { if (threadPool == null) { throw new UnsupportedOperationException("TestClusterService wasn't initialized with a thread pool"); } - NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout); - notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout); - onGoingTimeouts.add(notifyTimeout); + if (timeout != null) { + NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout); + notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout); + onGoingTimeouts.add(notifyTimeout); + } listeners.add(listener); listener.postAdded(); } From ea6718d87855467325f969b3b98d5719b9671ce2 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 16:34:26 +0100 Subject: [PATCH 318/322] Add pending review from @bleskes on #15771 --- .../org/elasticsearch/index/translog/TranslogWriter.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 094f5570777..75f306e5435 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Callback; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.index.shard.ShardId; import java.io.BufferedOutputStream; @@ -42,9 +41,6 @@ import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; public class TranslogWriter extends TranslogReader { @@ -268,12 +264,12 @@ public class TranslogWriter extends TranslogReader { @Override protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - if (position+targetBuffer.limit() > getWrittenOffset()) { + if (position+targetBuffer.remaining() > getWrittenOffset()) { synchronized (this) { outputStream.flush(); } } - // we don't have to have a read lock here because we only write ahead to the file, so all writes has been complete + // we don't have to have a lock here because we only write ahead to the file, so all writes has been complete // for the requested location. 
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } From dff30ece05f561263c4aa1968fbd1a9fca6504ce Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 5 Jan 2016 17:58:20 +0100 Subject: [PATCH 319/322] Double check if the stream must be flushed, to allow tests to make better assumptions about what is visible and what isn't after tragic events --- .../org/elasticsearch/index/translog/TranslogWriter.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 75f306e5435..026aac4515e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -266,7 +266,13 @@ public class TranslogWriter extends TranslogReader { protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { if (position+targetBuffer.remaining() > getWrittenOffset()) { synchronized (this) { - outputStream.flush(); + // we only flush here if it's really needed - try to minimize the impact of the read operation + // in some cases, i.e. after a tragic event, we might still be able to read the relevant value, + // which is not really important in production, but some tests can make stricter assumptions + // if we don't fail in this call unless absolutely necessary. + if (position+targetBuffer.remaining() > getWrittenOffset()) { + outputStream.flush(); + } } } // we don't have to have a lock here because we only write ahead to the file, so all writes has been complete From a89dba27c25d02f7b7c01bf569a29588392f6f48 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Tue, 5 Jan 2016 10:07:34 -0500 Subject: [PATCH 320/322] Task Management: Add framework for registering and communicating with tasks Adds a task manager class and enables all activities to register with the task manager. Currently, the immutable Transport*Action class represents the activity itself, shared across all requests. This PR adds an additional structure, Task, that keeps track of currently running requests and can be used to communicate with these requests using TransportTasksAction. 
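As a usage sketch (illustrative only; `client` stands for any ElasticsearchClient, and the classes are the ones introduced below):

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;

// List the tasks currently running on all nodes, including their descriptions.
ListTasksRequest request = new ListTasksRequest().detailed(true);
ListTasksResponse response = client.execute(ListTasksAction.INSTANCE, request).actionGet();
for (TaskInfo task : response.getTasks()) {
    System.out.println(task.getNode().getId() + " -> " + task.getAction() + " [" + task.getId() + "]");
}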
Related to #15117 --- .../elasticsearch/action/ActionModule.java | 3 + .../action/TaskOperationFailure.java | 117 +++ .../health/TransportClusterHealthAction.java | 11 +- .../node/tasks/list/ListTasksAction.java | 46 ++ .../node/tasks/list/ListTasksRequest.java | 69 ++ .../tasks/list/ListTasksRequestBuilder.java | 41 ++ .../node/tasks/list/ListTasksResponse.java | 159 +++++ .../cluster/node/tasks/list/TaskInfo.java | 140 ++++ .../tasks/list/TransportListTasksAction.java | 69 ++ .../close/TransportCloseIndexAction.java | 5 +- .../delete/TransportDeleteIndexAction.java | 5 +- .../open/TransportOpenIndexAction.java | 5 +- .../type/TransportSearchTypeAction.java | 4 +- .../action/support/ActionFilter.java | 7 +- .../action/support/ActionFilterChain.java | 3 +- .../action/support/ChildTaskRequest.java | 71 ++ .../support/HandledTransportAction.java | 10 +- .../action/support/TransportAction.java | 44 +- .../master/TransportMasterNodeAction.java | 24 +- .../action/support/nodes/BaseNodeRequest.java | 4 +- .../support/nodes/TransportNodesAction.java | 22 +- .../TransportReplicationAction.java | 2 +- .../shard/TransportSingleShardAction.java | 2 +- .../support/tasks/BaseTasksRequest.java | 195 +++++ .../support/tasks/BaseTasksResponse.java | 92 +++ .../support/tasks/TasksRequestBuilder.java | 54 ++ .../support/tasks/TransportTasksAction.java | 380 ++++++++++ .../client/ClusterAdminClient.java | 26 + .../org/elasticsearch/client/Requests.java | 22 + .../client/support/AbstractClient.java | 19 + .../elasticsearch/cluster/ClusterService.java | 6 + .../service/InternalClusterService.java | 9 + .../common/io/stream/StreamInput.java | 13 + .../common/io/stream/StreamInputReader.java | 33 + .../common/io/stream/StreamOutput.java | 10 + .../common/network/NetworkModule.java | 6 +- .../node/tasks/RestListTasksAction.java | 61 ++ .../org/elasticsearch/tasks/ChildTask.java | 57 ++ .../java/org/elasticsearch/tasks/Task.java | 79 +++ .../org/elasticsearch/tasks/TaskManager.java | 76 ++ .../transport/DelegatingTransportChannel.java | 74 ++ .../transport/RequestHandlerRegistry.java | 64 +- .../transport/TransportChannel.java | 4 + .../transport/TransportRequest.java | 12 + .../transport/TransportRequestHandler.java | 9 + .../transport/TransportService.java | 28 +- .../transport/local/LocalTransport.java | 4 +- .../local/LocalTransportChannel.java | 10 + .../netty/MessageChannelHandler.java | 4 +- .../netty/NettyTransportChannel.java | 10 + .../admin/cluster/node/tasks/TasksIT.java | 38 + .../node/tasks/TransportTasksActionTests.java | 664 ++++++++++++++++++ .../TransportActionFilterChainTests.java | 32 +- .../TransportBroadcastByNodeActionTests.java | 13 +- .../TransportMasterNodeActionTests.java | 7 +- .../TransportReplicationActionTests.java | 10 + .../client/node/NodeClientHeadersTests.java | 4 +- .../TransportClientHeadersTests.java | 1 + .../TransportClientNodesServiceTests.java | 1 + .../PublishClusterStateActionTests.java | 10 + .../local/SimpleLocalTransportTests.java | 2 +- .../transport/netty/NettyTransportIT.java | 4 +- .../netty/SimpleNettyTransportTests.java | 3 +- docs/reference/tasks/list.asciidoc | 46 ++ .../messy/tests/IndicesRequestTests.java | 1 + .../deletebyquery/DeleteByQueryRequest.java | 2 +- .../rest-api-spec/api/tasks.list.json | 35 + .../test/tasks.list/10_basic.yaml | 6 + .../test/cluster/NoopClusterService.java | 6 + .../test/cluster/TestClusterService.java | 8 + .../test/transport/MockTransportService.java | 1 + 71 files changed, 3042 insertions(+), 72 deletions(-) create mode 
100644 core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java create mode 100644 core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java create mode 100644 core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java create mode 100644 core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java create mode 100644 core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java create mode 100644 core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java create mode 100644 core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java create mode 100644 core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java create mode 100644 core/src/main/java/org/elasticsearch/tasks/ChildTask.java create mode 100644 core/src/main/java/org/elasticsearch/tasks/Task.java create mode 100644 core/src/main/java/org/elasticsearch/tasks/TaskManager.java create mode 100644 core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java create mode 100644 core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java create mode 100644 core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java create mode 100644 docs/reference/tasks/list.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 8f0148d50ae..11cafb326a0 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; @@ -255,6 +257,7 @@ public class ActionModule extends AbstractModule { registerAction(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class); registerAction(NodesStatsAction.INSTANCE, 
TransportNodesStatsAction.class); registerAction(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class); + registerAction(ListTasksAction.INSTANCE, TransportListTasksAction.class); registerAction(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class); registerAction(ClusterStateAction.INSTANCE, TransportClusterStateAction.class); diff --git a/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java b/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java new file mode 100644 index 00000000000..bf5051c1a19 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +import static org.elasticsearch.ExceptionsHelper.detailedMessage; + +/** + * Information about task operation failures + * + * The class is final due to serialization limitations + */ +public final class TaskOperationFailure implements Writeable, ToXContent { + + private final String nodeId; + + private final long taskId; + + private final Throwable reason; + + private final RestStatus status; + + public TaskOperationFailure(StreamInput in) throws IOException { + nodeId = in.readString(); + taskId = in.readLong(); + reason = in.readThrowable(); + status = RestStatus.readFrom(in); + } + + public TaskOperationFailure(String nodeId, long taskId, Throwable t) { + this.nodeId = nodeId; + this.taskId = taskId; + this.reason = t; + status = ExceptionsHelper.status(t); + } + + public String getNodeId() { + return this.nodeId; + } + + public long getTaskId() { + return this.taskId; + } + + public String getReason() { + return detailedMessage(reason); + } + + public RestStatus getStatus() { + return status; + } + + public Throwable getCause() { + return reason; + } + + @Override + public TaskOperationFailure readFrom(StreamInput in) throws IOException { + return new TaskOperationFailure(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(nodeId); + out.writeLong(taskId); + out.writeThrowable(reason); + RestStatus.writeTo(out, status); + } + + @Override + public String toString() { + return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]"; + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("task_id", getTaskId()); + builder.field("node_id", getNodeId()); + builder.field("status", status.name()); + if (reason != null) { + builder.field("reason"); + builder.startObject(); + ElasticsearchException.toXContent(builder, params, reason); + builder.endObject(); + } + return builder; + + } + +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 9830305662e..79adbafa9bb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -75,7 +76,13 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< } @Override - protected void masterOperation(final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { + protected final void masterOperation(ClusterHealthRequest request, ClusterState state, ActionListener listener) throws Exception { + logger.warn("attempt to execute a cluster health operation without a task"); + throw new UnsupportedOperationException("task parameter is required for this operation"); + } + + @Override + protected void masterOperation(Task task, final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { if (request.waitForEvents() != null) { final long endTimeMS = TimeValue.nsecToMSec(System.nanoTime()) + request.timeout().millis(); clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", new ClusterStateUpdateTask(request.waitForEvents()) { @@ -95,7 +102,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< @Override public void onNoLongerMaster(String source) { logger.trace("stopped being master while waiting for events with priority [{}]. retrying.", request.waitForEvents()); - doExecute(request, listener); + doExecute(task, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java new file mode 100644 index 00000000000..acc11861108 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.tasks.list; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Action for retrieving a list of currently running tasks + */ +public class ListTasksAction extends Action { + + public static final ListTasksAction INSTANCE = new ListTasksAction(); + public static final String NAME = "cluster:monitor/tasks/lists"; + + private ListTasksAction() { + super(NAME); + } + + @Override + public ListTasksResponse newResponse() { + return new ListTasksResponse(); + } + + @Override + public ListTasksRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new ListTasksRequestBuilder(client, this); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java new file mode 100644 index 00000000000..0b0637e0b8e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.tasks.list; + +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * A request to get node tasks + */ +public class ListTasksRequest extends BaseTasksRequest { + + private boolean detailed = false; + + /** + * Get information from nodes based on the nodes ids specified. If none are passed, information + * for all nodes will be returned. + */ + public ListTasksRequest(String... nodesIds) { + super(nodesIds); + } + + /** + * Should the detailed task information be returned. + */ + public boolean detailed() { + return this.detailed; + } + + /** + * Should the detailed task information be returned. 
+ */ + public ListTasksRequest detailed(boolean detailed) { + this.detailed = detailed; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + detailed = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(detailed); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java new file mode 100644 index 00000000000..2b462014f43 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.tasks.list; + +import org.elasticsearch.action.support.tasks.TasksRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Builder for the request to retrieve the list of tasks running on the specified nodes + */ +public class ListTasksRequestBuilder extends TasksRequestBuilder { + + public ListTasksRequestBuilder(ElasticsearchClient client, ListTasksAction action) { + super(client, action, new ListTasksRequest()); + } + + /** + * Should detailed task information be returned. + */ + public ListTasksRequestBuilder setDetailed(boolean detailed) { + request.detailed(detailed); + return this; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java new file mode 100644 index 00000000000..2da9701fcfa --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -0,0 +1,159 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.tasks.list; + +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Returns the list of tasks currently running on the nodes + */ +public class ListTasksResponse extends BaseTasksResponse implements ToXContent { + + private List tasks; + + private Map> nodes; + + public ListTasksResponse() { + } + + public ListTasksResponse(List tasks, List taskFailures, List nodeFailures) { + super(taskFailures, nodeFailures); + this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks)); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + tasks = Collections.unmodifiableList(in.readList(TaskInfo::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(tasks); + } + + /** + * Returns the list of tasks by node + */ + public Map> getPerNodeTasks() { + if (nodes != null) { + return nodes; + } + Map> nodeTasks = new HashMap<>(); + + Set nodes = new HashSet<>(); + for (TaskInfo shard : tasks) { + nodes.add(shard.getNode()); + } + + for (DiscoveryNode node : nodes) { + List tasks = new ArrayList<>(); + for (TaskInfo taskInfo : this.tasks) { + if (taskInfo.getNode().equals(node)) { + tasks.add(taskInfo); + } + } + nodeTasks.put(node, tasks); + } + this.nodes = nodeTasks; + return nodeTasks; + } + + public List getTasks() { + return tasks; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (getTaskFailures() != null && getTaskFailures().size() > 0) { + builder.startArray("task_failures"); + for (TaskOperationFailure ex : getTaskFailures()){ + builder.value(ex); + } + builder.endArray(); + } + + if (getNodeFailures() != null && getNodeFailures().size() > 0) { + builder.startArray("node_failures"); + for (FailedNodeException ex : getNodeFailures()){ + builder.value(ex); + } + builder.endArray(); + } + + builder.startObject("nodes"); + for (Map.Entry> entry : getPerNodeTasks().entrySet()) { + DiscoveryNode node = entry.getKey(); + builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE); + builder.field("name", node.name()); + builder.field("transport_address", node.address().toString()); + builder.field("host", node.getHostName()); + builder.field("ip", node.getAddress()); + + if (!node.attributes().isEmpty()) { + builder.startObject("attributes"); + for (ObjectObjectCursor attr : node.attributes()) { + builder.field(attr.key, attr.value, XContentBuilder.FieldCaseConversion.NONE); + } + builder.endObject(); + } + builder.startArray("tasks"); + for(TaskInfo task : entry.getValue()) { + task.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + } + 
builder.endObject(); + return builder; + } + + @Override + public String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.string(); + } catch (IOException e) { + return "{ \"error\" : \"" + e.getMessage() + "\"}"; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java new file mode 100644 index 00000000000..ed43da2c4ed --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.tasks.list; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * Information about a currently running task. + *

        + * Tasks are used for communication with transport actions. As a result, they can contain callback + * references as well as mutable state. That makes it impractical to send tasks over transport channels + * and use in APIs. Instead, immutable and streamable TaskInfo objects are used to represent + * snapshot information about currently running tasks. + */ +public class TaskInfo implements Writeable, ToXContent { + + private final DiscoveryNode node; + + private final long id; + + private final String type; + + private final String action; + + private final String description; + + private final String parentNode; + + private final long parentId; + + public TaskInfo(DiscoveryNode node, long id, String type, String action, String description) { + this(node, id, type, action, description, null, -1L); + } + + public TaskInfo(DiscoveryNode node, long id, String type, String action, String description, String parentNode, long parentId) { + this.node = node; + this.id = id; + this.type = type; + this.action = action; + this.description = description; + this.parentNode = parentNode; + this.parentId = parentId; + } + + public TaskInfo(StreamInput in) throws IOException { + node = DiscoveryNode.readNode(in); + id = in.readLong(); + type = in.readString(); + action = in.readString(); + description = in.readOptionalString(); + parentNode = in.readOptionalString(); + parentId = in.readLong(); + } + + public DiscoveryNode getNode() { + return node; + } + + public long getId() { + return id; + } + + public String getType() { + return type; + } + + public String getAction() { + return action; + } + + public String getDescription() { + return description; + } + + public String getParentNode() { + return parentNode; + } + + public long getParentId() { + return parentId; + } + + @Override + public TaskInfo readFrom(StreamInput in) throws IOException { + return new TaskInfo(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + node.writeTo(out); + out.writeLong(id); + out.writeString(type); + out.writeString(action); + out.writeOptionalString(description); + out.writeOptionalString(parentNode); + out.writeLong(parentId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("node", node.getId()); + builder.field("id", id); + builder.field("type", type); + builder.field("action", action); + if (description != null) { + builder.field("description", description); + } + if (parentNode != null) { + builder.field("parent_node", parentNode); + builder.field("parent_id", parentId); + } + builder.endObject(); + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java new file mode 100644 index 00000000000..5475a394f34 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
new file mode 100644
index 00000000000..5475a394f34
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.tasks.TransportTasksAction;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskManager;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ *
+ */
+public class TransportListTasksAction extends TransportTasksAction<ListTasksRequest, ListTasksResponse, TaskInfo> {
+
+    @Inject
+    public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+        super(settings, ListTasksAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, ListTasksRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT);
+    }
+
+    @Override
+    protected ListTasksResponse newResponse(ListTasksRequest request, List<TaskInfo> tasks, List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
+        return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
+    }
+
+    @Override
+    protected TaskInfo readTaskResponse(StreamInput in) throws IOException {
+        return new TaskInfo(in);
+    }
+
+    @Override
+    protected TaskInfo taskOperation(ListTasksRequest request, Task task) {
+        return task.taskInfo(clusterService.localNode(), request.detailed());
+    }
+
+    @Override
+    protected boolean accumulateExceptions() {
+        return true;
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
index e454fcabc7a..f8bbebf7db8 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
@@ -34,6 +34,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -75,12 +76,12 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction
     }
 
     @Override
-    protected void doExecute(CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
+    protected void doExecute(Task task, CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
         destructiveOperations.failDestructive(request.indices());
         if (closeIndexEnabled == false) {
             throw new IllegalStateException("closing indices is disabled - set [" + CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey() + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace");
         }
-        super.doExecute(request, listener);
+        super.doExecute(task, request, listener);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
index 82176da053c..28bf46f798f 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -62,9 +63,9 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction
     }
 
     @Override
-    protected void doExecute(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
+    protected void doExecute(Task task, DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
         destructiveOperations.failDestructive(request.indices());
-        super.doExecute(request, listener);
+        super.doExecute(task, request, listener);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
index 2717a2320ef..7ffb30b9534 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
@@ -32,6 +32,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -65,9 +66,9 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction
     }
 
     @Override
-    protected void doExecute(OpenIndexRequest request, ActionListener<OpenIndexResponse> listener) {
+    protected void doExecute(Task task, OpenIndexRequest request, ActionListener<OpenIndexResponse> listener) {
         destructiveOperations.failDestructive(request.indices());
-        super.doExecute(request, listener);
+        super.doExecute(task, request, listener);
     }
 
     @Override
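The destructive index actions above all follow the same shape: doExecute now receives the Task under which the request was registered and threads it through to the superclass. A minimal sketch of how a further subclass could consume the task it is handed (hypothetical override, not part of this patch; assumes the doExecute(Task, ...) hook introduced above, and that task may be null for untracked requests):

    // Hypothetical subclass method: peek at the task before delegating upstream.
    @Override
    protected void doExecute(Task task, OpenIndexRequest request, ActionListener<OpenIndexResponse> listener) {
        if (task != null) {
            logger.debug("opening indices [{}] as task [{}]", request.indices(), task.getId());
        }
        super.doExecute(task, request, listener);
    }
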
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
index 12c5f733643..9d1004ccd5c 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
@@ -54,7 +54,9 @@ import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.ShardSearchTransportRequest;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.query.QuerySearchResultProvider;
+import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
 
 import java.util.List;
 import java.util.Map;
 
@@ -77,7 +79,7 @@ public abstract class TransportSearchTypeAction extends TransportAction
[...]
-        super(settings, SearchAction.NAME, threadPool, actionFilters, indexNameExpressionResolver);
+        super(settings, SearchAction.NAME, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
[...]
diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
--- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
[...]
 public abstract class HandledTransportAction<Request extends ActionRequest, Response extends ActionResponse> extends TransportAction<Request, Response> {
 
     protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> request) {
-        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver);
+        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
         transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, new TransportHandler());
     }
 
     class TransportHandler implements TransportRequestHandler<Request> {
 
         @Override
-        public final void messageReceived(final Request request, final TransportChannel channel) throws Exception {
+        public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception {
+            messageReceived(request, channel);
+        }
+
+        @Override
+        public final void messageReceived(Request request, TransportChannel channel) throws Exception {
             execute(request, new ActionListener<Response>() {
 
                 @Override
                 public void onResponse(Response response) {
diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 07ddff3348c..3e0454550ba 100644
--- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -29,6 +29,8 @@ import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -45,15 +47,17 @@ public abstract class TransportAction
[...]
+    protected final TaskManager taskManager;
[...]
     public final ActionFuture<Response> execute(Request request) {
 
@@ -63,6 +67,28 @@ public abstract class TransportAction
     }
 
     public final void execute(Request request, ActionListener<Response> listener) {
+        Task task = taskManager.register("transport", actionName, request);
+        if (task == null) {
+            execute(null, request, listener);
+        } else {
+            execute(task, request, new ActionListener<Response>() {
+                @Override
+                public void onResponse(Response response) {
+                    taskManager.unregister(task);
+                    listener.onResponse(response);
+                }
+
+                @Override
+                public void onFailure(Throwable e) {
+                    taskManager.unregister(task);
+                    listener.onFailure(e);
+                }
+            });
+        }
+    }
+
+    private final void execute(Task task, Request request, ActionListener<Response> listener) {
+
         ActionRequestValidationException validationException = request.validate();
         if (validationException != null) {
             listener.onFailure(validationException);
@@ -71,17 +97,21 @@ public abstract class TransportAction
[...]
             RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger);
-            requestFilterChain.proceed(actionName, request, listener);
+            requestFilterChain.proceed(task, actionName, request, listener);
         }
     }
 
+    protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+        doExecute(request, listener);
+    }
+
     protected abstract void doExecute(Request request, ActionListener<Response> listener);
 
     private static class RequestFilterChain implements ActionFilterChain {
 
@@ -96,13 +126,13 @@ public abstract class TransportAction
[...]
                 } else if (i == this.action.filters.length) {
-                    this.action.doExecute((Request) request, new FilteredActionListener<Response>(actionName, listener, new ResponseFilterChain(this.action.filters, logger)));
+                    this.action.doExecute(task, (Request) request, new FilteredActionListener<Response>(actionName, listener, new ResponseFilterChain(this.action.filters, logger)));
                 } else {
                     listener.onFailure(new IllegalStateException("proceed was called too many times"));
                 }
@@ -131,7 +161,7 @@ public abstract class TransportAction
[...]
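Taken together, execute() above guarantees that a registered task is unregistered on both the response and the failure path, and it lets requests opt out of tracking entirely: TaskManager.register returns null when the request's createTask hook (added to TransportRequest later in this patch) returns null. A sketch of such an opt-out request (hypothetical class; assumes that hook):

    // Hypothetical request that is never tracked by the TaskManager.
    public class UntrackedRequest extends ActionRequest<UntrackedRequest> {
        @Override
        public Task createTask(long id, String type, String action) {
            return null;   // register(...) returns null, execute(...) skips task bookkeeping
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }
    }
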
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
[...]
     protected abstract void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception;
 
+    /**
+     * Override this operation if access to the task parameter is needed
+     */
+    protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
+        masterOperation(request, state, listener);
+    }
+
     protected boolean localExecute(Request request) {
         return false;
     }
 
@@ -91,8 +99,14 @@ public abstract class TransportMasterNodeAction
     }
 
     @Override
-    protected void doExecute(final Request request, ActionListener<Response> listener) {
-        new AsyncSingleAction(request, listener).start();
+    protected final void doExecute(final Request request, ActionListener<Response> listener) {
+        logger.warn("attempt to execute a master node operation without a task");
+        throw new UnsupportedOperationException("task parameter is required for this operation");
+    }
+
+    @Override
+    protected void doExecute(Task task, final Request request, ActionListener<Response> listener) {
+        new AsyncSingleAction(task, request, listener).start();
     }
 
     class AsyncSingleAction {
 
@@ -100,6 +114,7 @@ public abstract class TransportMasterNodeAction
         private final ActionListener<Response> listener;
         private final Request request;
         private volatile ClusterStateObserver observer;
+        private final Task task;
 
         private final ClusterStateObserver.ChangePredicate retryableOrNoBlockPredicate = new ClusterStateObserver.ValidationPredicate() {
             @Override
@@ -109,7 +124,8 @@ public abstract class TransportMasterNodeAction
             }
         };
 
-        AsyncSingleAction(Request request, ActionListener<Response> listener) {
+        AsyncSingleAction(Task task, Request request, ActionListener<Response> listener) {
+            this.task = task;
             this.request = request;
             // TODO do we really need to wrap it in a listener? the handlers should be cheap
             if ((listener instanceof ThreadedActionListener) == false) {
@@ -157,7 +173,7 @@ public abstract class TransportMasterNodeAction
[...]
-                        masterOperation(request, clusterService.state(), listener);
+                        masterOperation(task, request, clusterService.state(), listener);
[...]
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
[...]
     @Override
-    protected void doExecute(NodesRequest request, ActionListener<NodesResponse> listener) {
-        new AsyncAction(request, listener).start();
+    protected final void doExecute(NodesRequest request, ActionListener<NodesResponse> listener) {
+        logger.warn("attempt to execute a transport nodes operation without a task");
+        throw new UnsupportedOperationException("task parameter is required for this operation");
+    }
+
+    @Override
+    protected void doExecute(Task task, NodesRequest request, ActionListener<NodesResponse> listener) {
+        new AsyncAction(task, request, listener).start();
     }
 
     protected boolean transportCompress() {
 
@@ -106,8 +114,10 @@ public abstract class TransportNodesAction
         private final ActionListener<NodesResponse> listener;
         private final AtomicReferenceArray<Object> responses;
         private final AtomicInteger counter = new AtomicInteger();
+        private final Task task;
 
-        private AsyncAction(NodesRequest request, ActionListener<NodesResponse> listener) {
+        private AsyncAction(Task task, NodesRequest request, ActionListener<NodesResponse> listener) {
+            this.task = task;
             this.request = request;
             this.listener = listener;
             ClusterState clusterState = clusterService.state();
@@ -150,7 +160,11 @@ public abstract class TransportNodesAction
[...]
+                        if (task != null) {
+                            nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
+                        }
[...]
                         transportService.sendRequest(node, transportNodeAction, nodeRequest, transportRequestOptions, new BaseTransportResponseHandler<NodeResponse>() {
                             @Override
                             public NodeResponse newInstance() {
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
index 80ac93e981b..6fd7da91645 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
@@ -114,7 +114,7 @@ public abstract class TransportReplicationAction
                                          Supplier<Request> request, Supplier<ReplicaRequest> replicaRequest, String executor) {
-        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver);
+        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
         this.transportService = transportService;
         this.clusterService = clusterService;
         this.indicesService = indicesService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
index 2a7e19bfade..47eebc9cfcd 100644
--- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
@@ -66,7 +66,7 @@ public abstract class TransportSingleShardAction
                                       Supplier<Request> request, String executor) {
-        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver);
+        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
 
         this.clusterService = clusterService;
         this.transportService = transportService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
new file mode 100644
index 00000000000..a1e485bb64f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support.tasks;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.tasks.ChildTask;
+import org.elasticsearch.tasks.Task;
+
+import java.io.IOException;
+
+/**
+ * A base class for task requests
+ */
+public class BaseTasksRequest<T extends BaseTasksRequest> extends ActionRequest<T> {
+
+    public static final String[] ALL_ACTIONS = Strings.EMPTY_ARRAY;
+
+    public static final String[] ALL_NODES = Strings.EMPTY_ARRAY;
+
+    public static final long ALL_TASKS = -1L;
+
+    private String[] nodesIds = ALL_NODES;
+
+    private TimeValue timeout;
+
+    private String[] actions = ALL_ACTIONS;
+
+    private String parentNode;
+
+    private long parentTaskId = ALL_TASKS;
+
+    public BaseTasksRequest() {
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    /**
+     * Get information about tasks from nodes based on the nodes ids specified.
+     * If none are passed, information for all nodes will be returned.
+     */
+    public BaseTasksRequest(ActionRequest request, String... nodesIds) {
+        super(request);
+        this.nodesIds = nodesIds;
+    }
+
+    /**
+     * Get information about tasks from nodes based on the nodes ids specified.
+     * If none are passed, information for all nodes will be returned.
+     */
+    public BaseTasksRequest(String... nodesIds) {
+        this.nodesIds = nodesIds;
+    }
+
+    /**
+     * Sets the list of action masks for the actions that should be returned
+     */
+    @SuppressWarnings("unchecked")
+    public final T actions(String... actions) {
+        this.actions = actions;
+        return (T) this;
+    }
+
+    /**
+     * Return the list of action masks for the actions that should be returned
+     */
+    public String[] actions() {
+        return actions;
+    }
+
+    public final String[] nodesIds() {
+        return nodesIds;
+    }
+
+    @SuppressWarnings("unchecked")
+    public final T nodesIds(String... nodesIds) {
+        this.nodesIds = nodesIds;
+        return (T) this;
+    }
+
+    /**
+     * Returns the parent node id that tasks should be filtered by
+     */
+    public String parentNode() {
+        return parentNode;
+    }
+
+    @SuppressWarnings("unchecked")
+    public T parentNode(String parentNode) {
+        this.parentNode = parentNode;
+        return (T) this;
+    }
+
+    /**
+     * Returns the parent task id that tasks should be filtered by
+     */
+    public long parentTaskId() {
+        return parentTaskId;
+    }
+
+    @SuppressWarnings("unchecked")
+    public T parentTaskId(long parentTaskId) {
+        this.parentTaskId = parentTaskId;
+        return (T) this;
+    }
+
+    public TimeValue timeout() {
+        return this.timeout;
+    }
+
+    @SuppressWarnings("unchecked")
+    public final T timeout(TimeValue timeout) {
+        this.timeout = timeout;
+        return (T) this;
+    }
+
+    @SuppressWarnings("unchecked")
+    public final T timeout(String timeout) {
+        this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout");
+        return (T) this;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        nodesIds = in.readStringArray();
+        actions = in.readStringArray();
+        parentNode = in.readOptionalString();
+        parentTaskId = in.readLong();
+        if (in.readBoolean()) {
+            timeout = TimeValue.readTimeValue(in);
+        }
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeStringArrayNullable(nodesIds);
+        out.writeStringArrayNullable(actions);
+        out.writeOptionalString(parentNode);
+        out.writeLong(parentTaskId);
+        out.writeOptionalStreamable(timeout);
+    }
+
+    public boolean match(Task task) {
+        if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) {
+            return false;
+        }
+        if (parentNode() != null || parentTaskId() != BaseTasksRequest.ALL_TASKS) {
+            if (task instanceof ChildTask) {
+                if (parentNode() != null) {
+                    if (parentNode().equals(((ChildTask) task).getParentNode()) == false) {
+                        return false;
+                    }
+                }
+                if (parentTaskId() != BaseTasksRequest.ALL_TASKS) {
+                    if (parentTaskId() != ((ChildTask) task).getParentId()) {
+                        return false;
+                    }
+                }
+            } else {
+                // This is not a child task and we need to match parent node or id
+                return false;
+            }
+        }
+        return true;
+    }
+}
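To make the filtering rules concrete: action masks go through Regex.simpleMatch, and parent filters only ever match ChildTask instances. A hedged sketch (action strings and ids are illustrative; ListTasksRequest is assumed to bind the self type as introduced earlier in this patch):

    // Sketch of match(): a plain Task can never satisfy parent filters.
    ListTasksRequest request = new ListTasksRequest()
            .actions("cluster:monitor/*")   // wildcard action masks
            .parentNode("nodeA")            // only children of tasks on nodeA ...
            .parentTaskId(42L);             // ... with parent task id 42

    Task plain = new Task(1L, "transport", "cluster:monitor/tasks/lists", () -> "");
    ChildTask child = new ChildTask(2L, "netty", "cluster:monitor/tasks/lists[n]", () -> "", "nodeA", 42L);

    assert request.match(plain) == false;   // not a ChildTask, excluded by parent filters
    assert request.match(child);            // action mask and both parent filters match
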
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java
new file mode 100644
index 00000000000..43be2b46db1
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support.tasks;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Base class for responses of task-related operations
+ */
+public class BaseTasksResponse extends ActionResponse {
+    private List<TaskOperationFailure> taskFailures;
+    private List<FailedNodeException> nodeFailures;
+
+    public BaseTasksResponse() {
+    }
+
+    public BaseTasksResponse(List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures) {
+        this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(taskFailures));
+        this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(nodeFailures));
+    }
+
+    /**
+     * The list of task failure exceptions.
+     */
+    public List<TaskOperationFailure> getTaskFailures() {
+        return taskFailures;
+    }
+
+    /**
+     * The list of node failure exceptions.
+     */
+    public List<FailedNodeException> getNodeFailures() {
+        return nodeFailures;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        int size = in.readVInt();
+        List<TaskOperationFailure> taskFailures = new ArrayList<>();
+        for (int i = 0; i < size; i++) {
+            taskFailures.add(new TaskOperationFailure(in));
+        }
+        size = in.readVInt();
+        this.taskFailures = Collections.unmodifiableList(taskFailures);
+        List<FailedNodeException> nodeFailures = new ArrayList<>();
+        for (int i = 0; i < size; i++) {
+            nodeFailures.add(new FailedNodeException(in));
+        }
+        this.nodeFailures = Collections.unmodifiableList(nodeFailures);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeVInt(taskFailures.size());
+        for (TaskOperationFailure exp : taskFailures) {
+            exp.writeTo(out);
+        }
+        out.writeVInt(nodeFailures.size());
+        for (FailedNodeException exp : nodeFailures) {
+            exp.writeTo(out);
+        }
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java
new file mode 100644
index 00000000000..a7265ce9998
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.support.tasks;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.unit.TimeValue;
+
+/**
+ * Builder for task-based requests
+ */
+public class TasksRequestBuilder<Request extends BaseTasksRequest<Request>, Response extends BaseTasksResponse, RequestBuilder extends TasksRequestBuilder<Request, Response, RequestBuilder>>
+        extends ActionRequestBuilder<Request, Response, RequestBuilder> {
+
+    protected TasksRequestBuilder(ElasticsearchClient client, Action<Request, Response, RequestBuilder> action, Request request) {
+        super(client, action, request);
+    }
+
+    @SuppressWarnings("unchecked")
+    public final RequestBuilder setNodesIds(String... nodesIds) {
+        request.nodesIds(nodesIds);
+        return (RequestBuilder) this;
+    }
+
+    @SuppressWarnings("unchecked")
+    public final RequestBuilder setActions(String... actions) {
+        request.actions(actions);
+        return (RequestBuilder) this;
+    }
+
+    @SuppressWarnings("unchecked")
+    public final RequestBuilder setTimeout(TimeValue timeout) {
+        request.timeout(timeout);
+        return (RequestBuilder) this;
+    }
+}
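A usage sketch for this builder, assuming a connected Client and the prepareListTasks(...) method added to the cluster admin client later in this patch; getTasks() on ListTasksResponse is likewise assumed, and the node ids and action mask are illustrative:

    // Sketch: list management tasks on two nodes with a per-node timeout.
    ListTasksResponse response = client.admin().cluster()
            .prepareListTasks("nodeId1", "nodeId2")    // TasksRequestBuilder.setNodesIds(...)
            .setActions("cluster:monitor/tasks/*")     // setActions(...)
            .setTimeout(TimeValue.timeValueSeconds(10))
            .get();
    for (TaskInfo taskInfo : response.getTasks()) {
        System.out.println(taskInfo.getAction() + " running on " + taskInfo.getNode().getName());
    }
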
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
new file mode 100644
index 00000000000..42be7e4eefc
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
@@ -0,0 +1,380 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support.tasks;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.NoSuchNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.ChildTaskRequest;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.BaseTransportResponseHandler;
+import org.elasticsearch.transport.NodeShouldNotConnectException;
+import org.elasticsearch.transport.TransportChannel;
+import org.elasticsearch.transport.TransportException;
+import org.elasticsearch.transport.TransportRequestHandler;
+import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReferenceArray;
+import java.util.function.Supplier;
+
+/**
+ * The base class for transport actions that interact with currently running tasks.
+ */
+public abstract class TransportTasksAction<
+        TasksRequest extends BaseTasksRequest<TasksRequest>,
+        TasksResponse extends BaseTasksResponse,
+        TaskResponse extends Writeable<TaskResponse>
+    > extends HandledTransportAction<TasksRequest, TasksResponse> {
+
+    protected final ClusterName clusterName;
+    protected final ClusterService clusterService;
+    protected final TransportService transportService;
+    protected final Supplier<TasksRequest> requestSupplier;
+    protected final Supplier<TasksResponse> responseSupplier;
+
+    protected final String transportNodeAction;
+
+    protected TransportTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool,
+                                   ClusterService clusterService, TransportService transportService, ActionFilters actionFilters,
+                                   IndexNameExpressionResolver indexNameExpressionResolver, Supplier<TasksRequest> requestSupplier,
+                                   Supplier<TasksResponse> responseSupplier,
+                                   String nodeExecutor) {
+        super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, requestSupplier);
+        this.clusterName = clusterName;
+        this.clusterService = clusterService;
+        this.transportService = transportService;
+        this.transportNodeAction = actionName + "[n]";
+        this.requestSupplier = requestSupplier;
+        this.responseSupplier = responseSupplier;
+
+        transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler());
+    }
+
+    @Override
+    protected final void doExecute(TasksRequest request, ActionListener<TasksResponse> listener) {
+        logger.warn("attempt to execute a transport tasks operation without a task");
+        throw new UnsupportedOperationException("task parameter is required for this operation");
+    }
+
+    @Override
+    protected void doExecute(Task task, TasksRequest request, ActionListener<TasksResponse> listener) {
+        new AsyncAction(task, request, listener).start();
+    }
+
+    private NodeTasksResponse nodeOperation(NodeTaskRequest nodeTaskRequest) {
+        TasksRequest request = nodeTaskRequest.tasksRequest;
+        List<TaskResponse> results = new ArrayList<>();
+        List<TaskOperationFailure> exceptions = new ArrayList<>();
+        for (Task task : taskManager.getTasks().values()) {
+            // First check action and node filters
+            if (request.match(task)) {
+                try {
+                    results.add(taskOperation(request, task));
+                } catch (Exception ex) {
+                    exceptions.add(new TaskOperationFailure(clusterService.localNode().id(), task.getId(), ex));
+                }
+            }
+        }
+        return new NodeTasksResponse(clusterService.localNode().id(), results, exceptions);
+    }
+
+    protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
+        return nodesIds;
+    }
+
+    protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) {
+        return clusterState.nodes().resolveNodesIds(request.nodesIds());
+    }
+
+    protected abstract TasksResponse newResponse(TasksRequest request, List<TaskResponse> tasks, List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions);
+
+    @SuppressWarnings("unchecked")
+    protected TasksResponse newResponse(TasksRequest request, AtomicReferenceArray responses) {
+        List<TaskResponse> tasks = new ArrayList<>();
+        List<FailedNodeException> failedNodeExceptions = new ArrayList<>();
+        List<TaskOperationFailure> taskOperationFailures = new ArrayList<>();
+        for (int i = 0; i < responses.length(); i++) {
+            Object response = responses.get(i);
+            if (response instanceof FailedNodeException) {
+                failedNodeExceptions.add((FailedNodeException) response);
+            } else {
+                NodeTasksResponse tasksResponse = (NodeTasksResponse) response;
+                if (tasksResponse.results != null) {
+                    tasks.addAll(tasksResponse.results);
+                }
+                if (tasksResponse.exceptions != null) {
+                    taskOperationFailures.addAll(tasksResponse.exceptions);
+                }
+            }
+        }
+        return newResponse(request, tasks, taskOperationFailures, failedNodeExceptions);
+    }
+
+    protected abstract TaskResponse readTaskResponse(StreamInput in) throws IOException;
+
+    protected abstract TaskResponse taskOperation(TasksRequest request, Task task);
+
+    protected boolean transportCompress() {
+        return false;
+    }
+
+    protected abstract boolean accumulateExceptions();
+
+    private class AsyncAction {
+
+        private final TasksRequest request;
+        private final String[] nodesIds;
+        private final DiscoveryNode[] nodes;
+        private final ActionListener<TasksResponse> listener;
+        private final AtomicReferenceArray<Object> responses;
+        private final AtomicInteger counter = new AtomicInteger();
+        private final Task task;
+
+        private AsyncAction(Task task, TasksRequest request, ActionListener<TasksResponse> listener) {
+            this.task = task;
+            this.request = request;
+            this.listener = listener;
+            ClusterState clusterState = clusterService.state();
+            String[] nodesIds = resolveNodes(request, clusterState);
+            this.nodesIds = filterNodeIds(clusterState.nodes(), nodesIds);
+            ImmutableOpenMap<String, DiscoveryNode> nodes = clusterState.nodes().nodes();
+            this.nodes = new DiscoveryNode[nodesIds.length];
+            for (int i = 0; i < nodesIds.length; i++) {
+                this.nodes[i] = nodes.get(nodesIds[i]);
+            }
+            this.responses = new AtomicReferenceArray<>(this.nodesIds.length);
+        }
+
+        private void start() {
+            if (nodesIds.length == 0) {
+                // nothing to do
+                try {
+                    listener.onResponse(newResponse(request, responses));
+                } catch (Throwable t) {
+                    logger.debug("failed to generate empty response", t);
+                    listener.onFailure(t);
+                }
+            } else {
+                TransportRequestOptions.Builder builder = TransportRequestOptions.builder();
+                if (request.timeout() != null) {
+                    builder.withTimeout(request.timeout());
+                }
+                builder.withCompress(transportCompress());
+                for (int i = 0; i < nodesIds.length; i++) {
+                    final String nodeId = nodesIds[i];
+                    final int idx = i;
+                    final DiscoveryNode node = nodes[i];
+                    try {
+                        if (node == null) {
+                            onFailure(idx, nodeId, new NoSuchNodeException(nodeId));
+                        } else if (!clusterService.localNode().shouldConnectTo(node) && !clusterService.localNode().equals(node)) {
+                            // the check "!clusterService.localNode().equals(node)" is to maintain backward comp. where before
+                            // we allowed to connect from "local" client node to itself, certain tests rely on it, if we remove it, we need to fix
+                            // those (and they randomize the client node usage, so tricky to find when)
+                            onFailure(idx, nodeId, new NodeShouldNotConnectException(clusterService.localNode(), node));
+                        } else {
+                            NodeTaskRequest nodeRequest = new NodeTaskRequest(request);
+                            nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
+                            transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), new BaseTransportResponseHandler<NodeTasksResponse>() {
+                                @Override
+                                public NodeTasksResponse newInstance() {
+                                    return new NodeTasksResponse();
+                                }
+
+                                @Override
+                                public void handleResponse(NodeTasksResponse response) {
+                                    onOperation(idx, response);
+                                }
+
+                                @Override
+                                public void handleException(TransportException exp) {
+                                    onFailure(idx, node.id(), exp);
+                                }
+
+                                @Override
+                                public String executor() {
+                                    return ThreadPool.Names.SAME;
+                                }
+                            });
+                        }
+                    } catch (Throwable t) {
+                        onFailure(idx, nodeId, t);
+                    }
+                }
+            }
+        }
+
+        private void onOperation(int idx, NodeTasksResponse nodeResponse) {
+            responses.set(idx, nodeResponse);
+            if (counter.incrementAndGet() == responses.length()) {
+                finishHim();
+            }
+        }
+
+        private void onFailure(int idx, String nodeId, Throwable t) {
+            if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) {
+                logger.debug("failed to execute on node [{}]", t, nodeId);
+            }
+            if (accumulateExceptions()) {
+                responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t));
+            }
+            if (counter.incrementAndGet() == responses.length()) {
+                finishHim();
+            }
+        }
+
+        private void finishHim() {
+            TasksResponse finalResponse;
+            try {
+                finalResponse = newResponse(request, responses);
+            } catch (Throwable t) {
+                logger.debug("failed to combine responses from nodes", t);
+                listener.onFailure(t);
+                return;
+            }
+            listener.onResponse(finalResponse);
+        }
+    }
+
+    class NodeTransportHandler implements TransportRequestHandler<NodeTaskRequest> {
+
+        @Override
+        public void messageReceived(final NodeTaskRequest request, final TransportChannel channel) throws Exception {
+            channel.sendResponse(nodeOperation(request));
+        }
+    }
+
+    private class NodeTaskRequest extends ChildTaskRequest {
+        private TasksRequest tasksRequest;
+
+        protected NodeTaskRequest() {
+            super();
+        }
+
+        protected NodeTaskRequest(TasksRequest tasksRequest) {
+            super(tasksRequest);
+            this.tasksRequest = tasksRequest;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            tasksRequest = requestSupplier.get();
+            tasksRequest.readFrom(in);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            tasksRequest.writeTo(out);
+        }
+    }
+
+    private class NodeTasksResponse extends TransportResponse {
+        protected String nodeId;
+        protected List<TaskOperationFailure> exceptions;
+        protected List<TaskResponse> results;
+
+        public NodeTasksResponse() {
+        }
+
+        public NodeTasksResponse(String nodeId,
+                                 List<TaskResponse> results,
+                                 List<TaskOperationFailure> exceptions) {
+            this.nodeId = nodeId;
+            this.results = results;
+            this.exceptions = exceptions;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public List<TaskOperationFailure> getExceptions() {
+            return exceptions;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            nodeId = in.readString();
+            int resultsSize = in.readVInt();
+            results = new ArrayList<>(resultsSize);
+            for (; resultsSize > 0; resultsSize--) {
+                final TaskResponse result = in.readBoolean() ? readTaskResponse(in) : null;
+                results.add(result);
+            }
+            if (in.readBoolean()) {
+                int taskFailures = in.readVInt();
+                exceptions = new ArrayList<>(taskFailures);
+                for (int i = 0; i < taskFailures; i++) {
+                    exceptions.add(new TaskOperationFailure(in));
+                }
+            } else {
+                exceptions = null;
+            }
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeString(nodeId);
+            out.writeVInt(results.size());
+            for (TaskResponse result : results) {
+                if (result != null) {
+                    out.writeBoolean(true);
+                    result.writeTo(out);
+                } else {
+                    out.writeBoolean(false);
+                }
+            }
+            out.writeBoolean(exceptions != null);
+            if (exceptions != null) {
+                int taskFailures = exceptions.size();
+                out.writeVInt(taskFailures);
+                for (TaskOperationFailure exception : exceptions) {
+                    exception.writeTo(out);
+                }
+            }
+        }
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
index 947e0f9b526..2cee4341a39 100644
--- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
+++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
@@ -33,6 +33,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse;
@@ -249,6 +252,29 @@ public interface ClusterAdminClient extends ElasticsearchClient {
      */
     NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds);
 
+    /**
+     * List tasks
+     *
+     * @param request The nodes tasks request
+     * @return The result future
+     * @see org.elasticsearch.client.Requests#listTasksRequest(String...)
+     */
+    ActionFuture<ListTasksResponse> listTasks(ListTasksRequest request);
+
+    /**
+     * List active tasks
+     *
+     * @param request  The nodes tasks request
+     * @param listener A listener to be notified with a result
+     * @see org.elasticsearch.client.Requests#listTasksRequest(String...)
+     */
+    void listTasks(ListTasksRequest request, ActionListener<ListTasksResponse> listener);
+
+    /**
+     * List active tasks
+     */
+    ListTasksRequestBuilder prepareListTasks(String... nodesIds);
+
     /**
      * Returns list of shards the given search would be executed on.
     */
diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java
index 2640618f1bc..7fb6c5c2de0 100644
--- a/core/src/main/java/org/elasticsearch/client/Requests.java
+++ b/core/src/main/java/org/elasticsearch/client/Requests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.client;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
@@ -404,6 +405,27 @@ public class Requests {
         return new ClusterStatsRequest();
     }
 
+    /**
+     * Creates a nodes tasks request against all the nodes.
+     *
+     * @return The nodes tasks request
+     * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest)
+     */
+    public static ListTasksRequest listTasksRequest() {
+        return new ListTasksRequest();
+    }
+
+    /**
+     * Creates a nodes tasks request against one or more nodes. Pass null or an empty array for all nodes.
+     *
+     * @param nodesIds The nodes ids to get the tasks for
+     * @return The nodes tasks request
+     * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest)
+     */
+    public static ListTasksRequest listTasksRequest(String... nodesIds) {
+        return new ListTasksRequest(nodesIds);
+    }
+
     /**
      * Registers snapshot repository
      *
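A sketch of the asynchronous path through these helpers, given a Client client; the node id and action mask are illustrative:

    // Sketch: build the request via Requests and use the listener-based variant.
    ListTasksRequest request = Requests.listTasksRequest("nodeId1").actions("indices:*");
    client.admin().cluster().listTasks(request, new ActionListener<ListTasksResponse>() {
        @Override
        public void onResponse(ListTasksResponse response) {
            // per-task and per-node failures are reported separately
            // via getTaskFailures() and getNodeFailures()
        }

        @Override
        public void onFailure(Throwable e) {
            // transport-level failure
        }
    });
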
diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
index 8c0fe125135..e085c8da075 100644
--- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
+++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
@@ -41,6 +41,10 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder;
@@ -968,6 +972,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client
         return new NodesHotThreadsRequestBuilder(this, NodesHotThreadsAction.INSTANCE).setNodesIds(nodesIds);
     }
 
+    @Override
+    public ActionFuture<ListTasksResponse> listTasks(final ListTasksRequest request) {
+        return execute(ListTasksAction.INSTANCE, request);
+    }
+
+    @Override
+    public void listTasks(final ListTasksRequest request, final ActionListener<ListTasksResponse> listener) {
+        execute(ListTasksAction.INSTANCE, request, listener);
+    }
+
+    @Override
+    public ListTasksRequestBuilder prepareListTasks(String... nodesIds) {
+        return new ListTasksRequestBuilder(this, ListTasksAction.INSTANCE).setNodesIds(nodesIds);
+    }
+
     @Override
     public ActionFuture<ClusterSearchShardsResponse> searchShards(final ClusterSearchShardsRequest request) {
         return execute(ClusterSearchShardsAction.INSTANCE, request);
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java
index b682b0cc61d..12845fa3fa4 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java
@@ -26,6 +26,7 @@ import org.elasticsearch.cluster.service.PendingClusterTask;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.component.LifecycleComponent;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.tasks.TaskManager;
 
 import java.util.List;
 
@@ -148,4 +149,9 @@ public interface ClusterService extends LifecycleComponent<ClusterService> {
      * @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
     */
     TimeValue getMaxTaskWaitTime();
+
+    /**
+     * Returns task manager created in the cluster service
+     */
+    TaskManager getTaskManager();
 }
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
index 5fc013b6633..5e945d372fe 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
@@ -65,6 +65,7 @@ import org.elasticsearch.common.util.concurrent.PrioritizedRunnable;
 import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoveryService;
+import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -133,6 +134,8 @@ public class InternalClusterService extends AbstractLifecycleComponent
[...]
+    private final TaskManager taskManager;
[...]
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
[...]
+    /**
+     * Reads a list of objects
+     */
+    public <T> List<T> readList(StreamInputReader<T> reader) throws IOException {
+        int count = readVInt();
+        List<T> builder = new ArrayList<>(count);
+        for (int i = 0; i < count; i++) {
+            builder.add(reader.read(this));
+        }
+        return builder;
+    }
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java
new file mode 100644
[...]
+public interface StreamInputReader<T> {
+    T read(StreamInput t) throws IOException;
+}
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index ad02a0fd525..b423841acd0 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -667,4 +667,14 @@ public abstract class StreamOutput extends OutputStream {
         writeDouble(geoPoint.lat());
         writeDouble(geoPoint.lon());
     }
+
+    /**
+     * Writes a list of {@link Writeable} objects
+     */
+    public <T extends Writeable<T>> void writeList(List<T> list) throws IOException {
+        writeVInt(list.size());
+        for (T obj : list) {
+            obj.writeTo(this);
+        }
+    }
 }
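The two stream helpers are symmetric, and any type T with a T(StreamInput) constructor satisfies StreamInputReader<T> through a method reference. A round-trip sketch using TaskInfo from earlier in this patch (assumes a populated List<TaskInfo> taskInfos):

    // Sketch: write a List<TaskInfo>, read it back with the new helpers.
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeList(taskInfos);                           // StreamOutput.writeList(...)
    StreamInput in = out.bytes().streamInput();
    List<TaskInfo> copy = in.readList(TaskInfo::new);   // TaskInfo(StreamInput) as the reader
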
diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
index f7eab3da2ac..12e22a7693b 100644
--- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
+++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
@@ -36,6 +36,7 @@ import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction;
 import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction;
 import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction;
 import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction;
+import org.elasticsearch.rest.action.admin.cluster.node.tasks.RestListTasksAction;
 import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction;
 import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction;
 import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction;
@@ -259,7 +260,10 @@ public class NetworkModule extends AbstractModule {
         RestFieldStatsAction.class,
 
         // no abstract cat action
-        RestCatAction.class
+        RestCatAction.class,
+
+        // Tasks API
+        RestListTasksAction.class
     );
 
     private static final List<Class<? extends AbstractCatAction>> builtinCatHandlers = Arrays.asList(
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
new file mode 100644
index 00000000000..813c7822428
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.admin.cluster.node.tasks;
+
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.support.RestToXContentListener;
+
+import static org.elasticsearch.rest.RestRequest.Method.GET;
+
+
+public class RestListTasksAction extends BaseRestHandler {
+
+    @Inject
+    public RestListTasksAction(Settings settings, RestController controller, Client client) {
+        super(settings, controller, client);
+        controller.registerHandler(GET, "/_tasks", this);
+        controller.registerHandler(GET, "/_tasks/{nodeId}", this);
+        controller.registerHandler(GET, "/_tasks/{nodeId}/{actions}", this);
+    }
+
+    @Override
+    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
+        boolean detailed = request.paramAsBoolean("detailed", false);
+        String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
+        String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
+        String parentNode = request.param("parent_node");
+        long parentTaskId = request.paramAsLong("parent_task", ListTasksRequest.ALL_TASKS);
+
+        ListTasksRequest listTasksRequest = new ListTasksRequest(nodesIds);
+        listTasksRequest.detailed(detailed);
+        listTasksRequest.actions(actions);
+        listTasksRequest.parentNode(parentNode);
+        listTasksRequest.parentTaskId(parentTaskId);
+        client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel));
+    }
+}
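With the routes registered above, the endpoint answers requests such as the following (parameter names as read by handleRequest; node ids, action mask and parent values are illustrative):

    GET /_tasks
    GET /_tasks/nodeId1,nodeId2
    GET /_tasks/nodeId1/cluster:*?detailed=true&parent_node=nodeA&parent_task=42
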
diff --git a/core/src/main/java/org/elasticsearch/tasks/ChildTask.java b/core/src/main/java/org/elasticsearch/tasks/ChildTask.java
new file mode 100644
index 00000000000..14d49baf398
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/tasks/ChildTask.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.tasks;
+
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.inject.Provider;
+
+/**
+ * Child task
+ */
+public class ChildTask extends Task {
+
+    private final String parentNode;
+
+    private final long parentId;
+
+    public ChildTask(long id, String type, String action, Provider<String> description, String parentNode, long parentId) {
+        super(id, type, action, description);
+        this.parentNode = parentNode;
+        this.parentId = parentId;
+    }
+
+    /**
+     * Returns the parent node of the task, or null if the task doesn't have a parent task
+     */
+    public String getParentNode() {
+        return parentNode;
+    }
+
+    /**
+     * Returns the id of the parent task, or -1L if the task doesn't have a parent task
+     */
+    public long getParentId() {
+        return parentId;
+    }
+
+    public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) {
+        return new TaskInfo(node, getId(), getType(), getAction(), detailed ? getDescription() : null, parentNode, parentId);
+    }
+}
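A sketch of how a child task surfaces its parent coordinates in the snapshot (localNode stands for any DiscoveryNode; names and ids are illustrative):

    // Sketch: parent coordinates flow from ChildTask into the TaskInfo snapshot.
    ChildTask child = new ChildTask(7L, "netty", "cluster:monitor/tasks/lists[n]", () -> "node request", "parentNodeId", 6L);
    TaskInfo info = child.taskInfo(localNode, true);
    assert "parentNodeId".equals(info.getParentNode());
    assert info.getParentId() == 6L;   // rendered as parent_node / parent_id by toXContent
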
diff --git a/core/src/main/java/org/elasticsearch/tasks/Task.java b/core/src/main/java/org/elasticsearch/tasks/Task.java
new file mode 100644
index 00000000000..9e925b09d1a
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/tasks/Task.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.tasks;
+
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.inject.Provider;
+
+/**
+ * Current task information
+ */
+public class Task {
+
+    private final long id;
+
+    private final String type;
+
+    private final String action;
+
+    private final Provider<String> description;
+
+    public Task(long id, String type, String action, Provider<String> description) {
+        this.id = id;
+        this.type = type;
+        this.action = action;
+        this.description = description;
+    }
+
+    public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) {
+        return new TaskInfo(node, id, type, action, detailed ? getDescription() : null);
+    }
+
+    /**
+     * Returns the task id
+     */
+    public long getId() {
+        return id;
+    }
+
+    /**
+     * Returns the task channel type (netty, transport, direct)
+     */
+    public String getType() {
+        return type;
+    }
+
+    /**
+     * Returns the task action
+     */
+    public String getAction() {
+        return action;
+    }
+
+    /**
+     * Generates the task description
+     */
+    public String getDescription() {
+        return description.get();
+    }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
new file mode 100644
index 00000000000..a4bf118f024
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.tasks;
+
+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
+import org.elasticsearch.transport.TransportRequest;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Task Manager service for keeping track of currently running tasks on the nodes
+ */
+public class TaskManager extends AbstractComponent {
+
+    private final ConcurrentMapLong<Task> tasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
+
+    private final AtomicLong taskIdGenerator = new AtomicLong();
+
+    public TaskManager(Settings settings) {
+        super(settings);
+    }
+
+    /**
+     * Registers a task without a parent task
+     */
+    public Task register(String type, String action, TransportRequest request) {
+        Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action);
+        if (task != null) {
+            if (logger.isDebugEnabled()) {
+                logger.debug("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription());
+            }
+            Task previousTask = tasks.put(task.getId(), task);
+            assert previousTask == null;
+        }
+        return task;
+    }
+
+    /**
+     * Unregister the task
+     */
+    public void unregister(Task task) {
+        logger.debug("unregister task for id: {}", task.getId());
+        tasks.remove(task.getId());
+    }
+
+    /**
+     * Returns the list of currently running tasks on the node
+     */
+    public Map<Long, Task> getTasks() {
+        return Collections.unmodifiableMap(new HashMap<>(tasks));
+    }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java
new file mode 100644
index 00000000000..f6b178dba8d
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.transport;
+
+import java.io.IOException;
+
+/**
+ * Wrapper around transport channel that delegates all requests to the
+ * underlying channel
+ */
+public class DelegatingTransportChannel implements TransportChannel {
+
+    private final TransportChannel channel;
+
+    protected DelegatingTransportChannel(TransportChannel channel) {
+        this.channel = channel;
+    }
+
+    @Override
+    public String action() {
+        return channel.action();
+    }
+
+    @Override
+    public String getProfileName() {
+        return channel.getProfileName();
+    }
+
+    @Override
+    public long getRequestId() {
+        return channel.getRequestId();
+    }
+
+    @Override
+    public String getChannelType() {
+        return channel.getChannelType();
+    }
+
+    @Override
+    public void sendResponse(TransportResponse response) throws IOException {
+        channel.sendResponse(response);
+    }
+
+    @Override
+    public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
+        channel.sendResponse(response, options);
+    }
+
+    @Override
+    public void sendResponse(Throwable error) throws IOException {
+        channel.sendResponse(error);
+    }
+
+    public TransportChannel getChannel() {
+        return channel;
+    }
+}
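A standalone sketch of the lifecycle the TaskManager implements (the TransportRequest variable request is hypothetical; whether register(...) returns a task is decided by the request's createTask hook):

    // Sketch: a task is tracked only between register(...) and unregister(...).
    TaskManager taskManager = new TaskManager(Settings.EMPTY);
    Task task = taskManager.register("direct", "internal:example/action", request);
    if (task != null) {
        assert taskManager.getTasks().containsKey(task.getId());
        taskManager.unregister(task);
        assert taskManager.getTasks().containsKey(task.getId()) == false;
    }
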
TransportChannel channel) throws Exception { + final Task task = taskManager.register(channel.getChannelType(), action, request); + if (task == null) { + handler.messageReceived(request, channel); + } else { + boolean success = false; + try { + handler.messageReceived(request, new TransportChannelWrapper(taskManager, task, channel), task); + success = true; + } finally { + if (success == false) { + taskManager.unregister(task); + } + } + } } public boolean isForceExecution() { @@ -61,4 +79,44 @@ public class RequestHandlerRegistry { public String getExecutor() { return executor; } + + @Override + public String toString() { + return handler.toString(); + } + + private static class TransportChannelWrapper extends DelegatingTransportChannel { + + private final Task task; + + private final TaskManager taskManager; + + public TransportChannelWrapper(TaskManager taskManager, Task task, TransportChannel channel) { + super(channel); + this.task = task; + this.taskManager = taskManager; + } + + @Override + public void sendResponse(TransportResponse response) throws IOException { + endTask(); + super.sendResponse(response); + } + + @Override + public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + endTask(); + super.sendResponse(response, options); + } + + @Override + public void sendResponse(Throwable error) throws IOException { + endTask(); + super.sendResponse(error); + } + + private void endTask() { + taskManager.unregister(task); + } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportChannel.java b/core/src/main/java/org/elasticsearch/transport/TransportChannel.java index 4c7678d60f0..53fd4ebe91e 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportChannel.java @@ -30,6 +30,10 @@ public interface TransportChannel { String getProfileName(); + long getRequestId(); + + String getChannelType(); + void sendResponse(TransportResponse response) throws IOException; void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java index ddf54179476..d5c1491f1a6 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java @@ -19,6 +19,8 @@ package org.elasticsearch.transport; +import org.elasticsearch.tasks.Task; + /** */ public abstract class TransportRequest extends TransportMessage { @@ -43,4 +45,14 @@ public abstract class TransportRequest extends TransportMessage { + /** + * Override this method if access to the Task parameter is needed + */ + default void messageReceived(final T request, final TransportChannel channel, Task task) throws Exception { + messageReceived(request, channel); + } + void messageReceived(T request, TransportChannel channel) throws Exception; } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 709323cb305..5d74c4a408f 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; 
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -66,6 +67,7 @@ public class TransportService extends AbstractLifecycleComponent requestHandlers = Collections.emptyMap(); final Object requestHandlerMutex = new Object(); @@ -114,6 +116,7 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier requestFactory, String executor, TransportRequestHandler handler) { - RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, requestFactory, handler, executor, false); + RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, requestFactory, taskManager, handler, executor, false); registerRequestHandler(reg); } @@ -404,7 +411,7 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier request, String executor, boolean forceExecution, TransportRequestHandler handler) { - RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, request, handler, executor, forceExecution); + RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, request, taskManager, handler, executor, forceExecution); registerRequestHandler(reg); } @@ -413,7 +420,7 @@ public class TransportService extends AbstractLifecycleComponent implem request.readFrom(stream); if (ThreadPool.Names.SAME.equals(reg.getExecutor())) { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } else { threadPool.executor(reg.getExecutor()).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } @Override diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java index e6dfa97b613..e1e85e9a12f 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java @@ -106,6 +106,16 @@ public class LocalTransportChannel implements TransportChannel { sourceTransportServiceAdapter.onResponseSent(requestId, action, error); } + @Override + public long getRequestId() { + return requestId; + } + + @Override + public String getChannelType() { + return "local"; + } + private void writeResponseExceptionHeader(BytesStreamOutput stream) throws IOException { stream.writeLong(requestId); byte status = 0; diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 99ce5faa086..8df17f73233 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -255,7 +255,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { request.readFrom(buffer); if (ThreadPool.Names.SAME.equals(reg.getExecutor())) { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } else { 
threadPool.executor(reg.getExecutor()).execute(new RequestHandler(reg, request, transportChannel)); } @@ -310,7 +310,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { @SuppressWarnings({"unchecked"}) @Override protected void doRun() throws Exception { - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } @Override diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java index edfe9f39b48..aaf33c2fd5a 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java @@ -132,6 +132,16 @@ public class NettyTransportChannel implements TransportChannel { transportServiceAdapter.onResponseSent(requestId, action, error); } + @Override + public long getRequestId() { + return requestId; + } + + @Override + public String getChannelType() { + return "netty"; + } + /** * Returns the underlying netty channel. This method is intended be used for access to netty to get additional * details when processing the request and may be used by plugins. Responses should be sent using the methods diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java new file mode 100644 index 00000000000..4228c9fa699 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.cluster.node.tasks; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.test.ESIntegTestCase; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +/** + * Integration tests for task management API + */ +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +public class TasksIT extends ESIntegTestCase { + + public void testTaskCounts() { + // Run only on data nodes + ListTasksResponse response = client().admin().cluster().prepareListTasks("data:true").setActions(ListTasksAction.NAME + "[n]").get(); + assertThat(response.getTasks().size(), greaterThanOrEqualTo(cluster().numDataNodes())); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java new file mode 100644 index 00000000000..55c10aa298e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -0,0 +1,664 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.cluster.node.tasks; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.ChildTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.local.LocalTransport; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReferenceArray; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.not; + +public class TransportTasksActionTests extends ESTestCase { + + private static ThreadPool threadPool; + private static final ClusterName clusterName = new ClusterName("test-cluster"); + private TestNode[] testNodes; + private int nodesCount; + + @BeforeClass + public static void beforeClass() { + threadPool = new ThreadPool(TransportTasksActionTests.class.getSimpleName()); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + + @Before + public final void setupTestNodes() throws Exception { + nodesCount = randomIntBetween(2, 10); + testNodes = new TestNode[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + testNodes[i] = new TestNode("node" + i, threadPool, Settings.EMPTY); + } + 
} + + @After + public final void shutdownTestNodes() throws Exception { + for (TestNode testNode : testNodes) { + testNode.close(); + } + } + + private static class TestNode implements Releasable { + public TestNode(String name, ThreadPool threadPool, Settings settings) { + clusterService = new TestClusterService(threadPool); + transportService = new TransportService(Settings.EMPTY, + new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry()), + threadPool); + transportService.start(); + discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT); + transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService, + new ActionFilters(Collections.emptySet()), new IndexNameExpressionResolver(settings)); + } + + public final TestClusterService clusterService; + public final TransportService transportService; + public final DiscoveryNode discoveryNode; + public final TransportListTasksAction transportListTasksAction; + + @Override + public void close() { + transportService.close(); + } + } + + public static void connectNodes(TestNode... nodes) { + DiscoveryNode[] discoveryNodes = new DiscoveryNode[nodes.length]; + for (int i = 0; i < nodes.length; i++) { + discoveryNodes[i] = nodes[i].discoveryNode; + } + DiscoveryNode master = discoveryNodes[0]; + for (TestNode node : nodes) { + node.clusterService.setState(ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes)); + } + for (TestNode nodeA : nodes) { + for (TestNode nodeB : nodes) { + nodeA.transportService.connectToNode(nodeB.discoveryNode); + } + } + } + + public static class NodeRequest extends BaseNodeRequest { + protected String requestName; + private boolean enableTaskManager; + + public NodeRequest() { + super(); + } + + public NodeRequest(NodesRequest request, String nodeId) { + super(request, nodeId); + requestName = request.requestName; + enableTaskManager = request.enableTaskManager; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + requestName = in.readString(); + enableTaskManager = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + out.writeBoolean(enableTaskManager); + } + + @Override + public String getDescription() { + return "NodeRequest[" + requestName + ", " + enableTaskManager + "]"; + } + + @Override + public Task createTask(long id, String type, String action) { + if (enableTaskManager) { + return super.createTask(id, type, action); + } else { + return null; + } + } + } + + public static class NodesRequest extends BaseNodesRequest { + private String requestName; + private boolean enableTaskManager; + + private NodesRequest() { + super(); + } + + public NodesRequest(String requestName, String... nodesIds) { + this(requestName, true, nodesIds); + } + + public NodesRequest(String requestName, boolean enableTaskManager, String... 
nodesIds) { + super(nodesIds); + this.requestName = requestName; + this.enableTaskManager = enableTaskManager; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + requestName = in.readString(); + enableTaskManager = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + out.writeBoolean(enableTaskManager); + } + + @Override + public String getDescription() { + return "NodesRequest[" + requestName + ", " + enableTaskManager + "]"; + } + + @Override + public Task createTask(long id, String type, String action) { + if (enableTaskManager) { + return super.createTask(id, type, action); + } else { + return null; + } + } + } + + static class NodeResponse extends BaseNodeResponse { + + protected NodeResponse() { + super(); + } + + protected NodeResponse(DiscoveryNode node) { + super(node); + } + } + + static class NodesResponse extends BaseNodesResponse { + + private int failureCount; + + protected NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) { + super(clusterName, nodes); + this.failureCount = failureCount; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + failureCount = in.readVInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(failureCount); + } + + public int failureCount() { + return failureCount; + } + } + + /** + * Simulates node-based task that can be used to block node tasks so they are guaranteed to be registered by task manager + */ + abstract class TestNodesAction extends TransportNodesAction { + + TestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, + ClusterService clusterService, TransportService transportService) { + super(settings, actionName, clusterName, threadPool, clusterService, transportService, + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC); + } + + @Override + protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) { + final List nodesList = new ArrayList<>(); + int failureCount = 0; + for (int i = 0; i < responses.length(); i++) { + Object resp = responses.get(i); + if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones + nodesList.add((NodeResponse) resp); + } else if (resp instanceof FailedNodeException) { + failureCount++; + } else { + logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp); + } + } + return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount); + } + + @Override + protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) { + return new NodeRequest(request, nodeId); + } + + @Override + protected NodeResponse newNodeResponse() { + return new NodeResponse(); + } + + @Override + protected abstract NodeResponse nodeOperation(NodeRequest request); + + @Override + protected boolean accumulateExceptions() { + return true; + } + } + + static class TestTaskResponse implements Writeable { + + private final String status; + + public TestTaskResponse(StreamInput in) throws IOException { + status = in.readString(); + } + + public TestTaskResponse(String status) { + this.status = status; + } + + public String getStatus() { + 
return status; + } + + @Override + public TestTaskResponse readFrom(StreamInput in) throws IOException { + return new TestTaskResponse(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(status); + } + } + + + static class TestTasksRequest extends BaseTasksRequest { + + } + + static class TestTasksResponse extends BaseTasksResponse { + + private List tasks; + + public TestTasksResponse() { + + } + + public TestTasksResponse(List tasks, List taskFailures, List nodeFailures) { + super(taskFailures, nodeFailures); + this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks)); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int taskCount = in.readVInt(); + List builder = new ArrayList<>(); + for (int i = 0; i < taskCount; i++) { + builder.add(new TestTaskResponse(in)); + } + tasks = Collections.unmodifiableList(builder); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(tasks.size()); + for (TestTaskResponse task : tasks) { + task.writeTo(out); + } + } + } + + /** + * Test class for testing task operations + */ + static abstract class TestTasksAction extends TransportTasksAction { + + protected TestTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService) { + super(settings, actionName, clusterName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + } + + @Override + protected TestTasksResponse newResponse(TestTasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions) { + return new TestTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); + } + + @Override + protected TestTaskResponse readTaskResponse(StreamInput in) throws IOException { + return new TestTaskResponse(in); + } + + @Override + protected boolean accumulateExceptions() { + return true; + } + } + + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch) throws InterruptedException { + return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request")); + } + + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + CountDownLatch actionLatch = new CountDownLatch(nodesCount); + TestNodesAction[] actions = new TestNodesAction[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected NodeResponse nodeOperation(NodeRequest request) { + logger.info("Action on node " + node); + actionLatch.countDown(); + try { + checkLatch.await(); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } + logger.info("Action on node " + node + " finished"); + return new NodeResponse(testNodes[node].discoveryNode); + } + }; + } + // Make sure no tasks are running + for (TestNode node : testNodes) { + assertEquals(0, node.transportService.getTaskManager().getTasks().size()); + } + ActionFuture future = actions[0].execute(request); + logger.info("Awaiting for all actions to start"); + actionLatch.await(); + 
logger.info("Done waiting for all actions to start"); + return future; + } + + public void testRunningTasksCount() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + // Check task counts using taskManager + Map localTasks = testNodes[0].transportService.getTaskManager().getTasks(); + assertEquals(2, localTasks.size()); // all node tasks + 1 coordinating task + Task coordinatingTask = localTasks.get(Collections.min(localTasks.keySet())); + Task subTask = localTasks.get(Collections.max(localTasks.keySet())); + assertThat(subTask.getAction(), endsWith("[n]")); + assertThat(coordinatingTask.getAction(), not(endsWith("[n]"))); + for (int i = 1; i < testNodes.length; i++) { + Map remoteTasks = testNodes[i].transportService.getTaskManager().getTasks(); + assertEquals(1, remoteTasks.size()); + Task remoteTask = remoteTasks.values().iterator().next(); + assertThat(remoteTask.getAction(), endsWith("[n]")); + } + + // Check task counts using transport + int testNodeNum = randomIntBetween(0, testNodes.length - 1); + TestNode testNode = testNodes[testNodeNum]; + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction*"); // pick all test actions + logger.info("Listing currently running tasks using node [{}]", testNodeNum); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + logger.info("Checking currently running tasks"); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + + // Coordinating node + assertEquals(2, response.getPerNodeTasks().get(testNodes[0].discoveryNode).size()); + // Other nodes node + for (int i = 1; i < testNodes.length; i++) { + assertEquals(1, response.getPerNodeTasks().get(testNodes[i].discoveryNode).size()); + } + + // Check task counts using transport with filtering + testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction[n]"); // only pick node actions + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertNull(entry.getValue().get(0).getDescription()); + } + + // Check task counts using transport with detailed description + listTasksRequest.detailed(true); // same request only with detailed description + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + + // Make sure that we don't have any lingering tasks + for (TestNode node : testNodes) { + assertEquals(0, node.transportService.getTaskManager().getTasks().size()); + } + } + + public void testFindChildTasks() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length 
- 1)]; + + // Get the parent task + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction"); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(1, response.getTasks().size()); + String parentNode = response.getTasks().get(0).getNode().getId(); + long parentTaskId = response.getTasks().get(0).getId(); + + // Find tasks with common parent + listTasksRequest = new ListTasksRequest(); + listTasksRequest.parentNode(parentNode); + listTasksRequest.parentTaskId(parentTaskId); + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getTasks().size()); + for (TaskInfo task : response.getTasks()) { + assertEquals("testAction[n]", task.getAction()); + assertEquals(parentNode, task.getParentNode()); + assertEquals(parentTaskId, task.getParentId()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testTaskManagementOptOut() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + // Starting actions that disable task manager + ActionFuture future = startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request", false)); + + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + + // Get the parent task + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction*"); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(0, response.getTasks().size()); + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testTasksDescriptions() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + // Check task counts using transport with filtering + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction[n]"); // only pick node actions + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertNull(entry.getValue().get(0).getDescription()); + } + + // Check task counts using transport with detailed description + listTasksRequest.detailed(true); // same request only with detailed description + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException { + connectNodes(testNodes); + TestNodesAction[] actions = new 
TestNodesAction[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected NodeResponse nodeOperation(NodeRequest request) { + logger.info("Action on node " + node); + throw new RuntimeException("Test exception"); + } + }; + } + + for (TestNode testNode : testNodes) { + assertEquals(0, testNode.transportService.getTaskManager().getTasks().size()); + } + NodesRequest request = new NodesRequest("Test Request"); + NodesResponse responses = actions[0].execute(request).get(); + assertEquals(nodesCount, responses.failureCount()); + } + + public void testTaskLevelActionFailures() throws ExecutionException, InterruptedException, IOException { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + TestTasksAction[] tasksActions = new TestTasksAction[nodesCount]; + final int failTaskOnNode = randomIntBetween(1, nodesCount - 1); + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + // Simulate task action that fails on one of the tasks on one of the nodes + tasksActions[i] = new TestTasksAction(Settings.EMPTY, "testTasksAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) { + logger.info("Task action on node " + node); + if (failTaskOnNode == node && ((ChildTask) task).getParentNode() != null) { + logger.info("Failing on node " + node); + throw new RuntimeException("Task level failure"); + } + return new TestTaskResponse("Success on node " + node); + } + }; + } + + // Run task action on node tasks that are currently running + // should be successful on all nodes except one + TestTasksRequest testTasksRequest = new TestTasksRequest(); + testTasksRequest.actions("testAction[n]"); // pick all test actions + TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get(); + // Get successful responses from all nodes except one + assertEquals(testNodes.length - 1, response.tasks.size()); + assertEquals(1, response.getTaskFailures().size()); // one task failed + assertThat(response.getTaskFailures().get(0).getReason(), containsString("Task level failure")); + assertEquals(0, response.getNodeFailures().size()); // no nodes failed + + // Release all node tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index f21013b7fbe..6a14989be1a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -67,7 +69,7 @@ public class TransportActionFilterChainTests extends 
ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -147,7 +149,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -218,9 +220,9 @@ public class TransportActionFilterChainTests extends ESTestCase { RequestTestFilter testFilter = new RequestTestFilter(randomInt(), new RequestCallback() { @Override - public void execute(final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) { + public void execute(Task task, final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) { for (int i = 0; i <= additionalContinueCount; i++) { - actionFilterChain.proceed(action, actionRequest, actionListener); + actionFilterChain.proceed(task, action, actionRequest, actionListener); } } }); @@ -230,7 +232,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -286,7 +288,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -344,11 +346,11 @@ public class TransportActionFilterChainTests extends ESTestCase { @SuppressWarnings("unchecked") @Override - public void apply(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void apply(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { this.runs.incrementAndGet(); this.lastActionName = action; this.executionToken = counter.incrementAndGet(); - this.callback.execute(action, actionRequest, 
actionListener, actionFilterChain); + this.callback.execute(task, action, actionRequest, actionListener, actionFilterChain); } @Override @@ -375,8 +377,8 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - chain.proceed(action, request, listener); + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + chain.proceed(task, action, request, listener); } @Override @@ -391,20 +393,20 @@ public class TransportActionFilterChainTests extends ESTestCase { private static enum RequestOperation implements RequestCallback { CONTINUE_PROCESSING { @Override - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { - actionFilterChain.proceed(action, actionRequest, actionListener); + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + actionFilterChain.proceed(task, action, actionRequest, actionListener); } }, LISTENER_RESPONSE { @Override @SuppressWarnings("unchecked") - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { actionListener.onResponse(new TestResponse()); } }, LISTENER_FAILURE { @Override - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { actionListener.onFailure(new ElasticsearchTimeoutException("")); } } @@ -433,7 +435,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } private static interface RequestCallback { - void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain); + void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain); } private static interface ResponseCallback { diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 6f5be649451..e4a1a9deed9 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -470,5 +470,16 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { @Override public void sendResponse(Throwable error) throws IOException { } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "test"; + } + } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 104c94db0c2..980558c2716 100644 --- 
a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; @@ -119,9 +120,9 @@ public class TransportMasterNodeActionTests extends ESTestCase { } @Override - protected void doExecute(final Request request, ActionListener listener) { + protected void doExecute(Task task, final Request request, ActionListener listener) { // remove unneeded threading by wrapping listener with SAME to prevent super.doExecute from wrapping it with LISTENER - super.doExecute(request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); + super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); } @Override @@ -159,7 +160,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { if (masterOperationFailure) { listener.onFailure(exception); } else { diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index fc419128b7c..fdcf4b07245 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -862,6 +862,16 @@ public class TransportReplicationActionTests extends ESTestCase { public void sendResponse(Throwable error) throws IOException { listener.onFailure(error); } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "replica_test"; + } }; } diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index e93fbc8e14a..e7ba8de0f97 100644 --- a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -29,6 +29,8 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; @@ -61,7 +63,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, 
actionName, threadPool, EMPTY_FILTERS, null); + super(settings, actionName, threadPool, EMPTY_FILTERS, null, new TaskManager(settings)); } @Override diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index f452bb5c36c..f127ae28378 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.Transport; diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 093e46186b3..72ace64d9ee 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportResponseHandler; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 0637ae7de8e..6faa02e16d7 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -886,5 +886,15 @@ public class PublishClusterStateActionTests extends ESTestCase { this.error.set(error); assertThat(response.get(), nullValue()); } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "capturing"; + } } } diff --git a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java index 1d1ad8d5ba9..d3de3ce59fb 100644 --- a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java @@ -33,4 +33,4 @@ public class SimpleLocalTransportTests extends AbstractSimpleTransportTestCase { transportService.start(); return transportService; } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index 78caef498d1..55f9bc49df3 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java 
@@ -146,7 +146,7 @@ public class NettyTransportIT extends ESIntegTestCase {
 }
 if (reg.getExecutor() == ThreadPool.Names.SAME) {
 //noinspection unchecked
- reg.getHandler().messageReceived(request, transportChannel);
+ reg.processMessageReceived(request, transportChannel);
 } else {
 threadPool.executor(reg.getExecutor()).execute(new RequestHandler(reg, request, transportChannel));
 }
@@ -176,7 +176,7 @@ public class NettyTransportIT extends ESIntegTestCase {
 @SuppressWarnings({"unchecked"})
 @Override
 protected void doRun() throws Exception {
- reg.getHandler().messageReceived(request, transportChannel);
+ reg.processMessageReceived(request, transportChannel);
 }
 @Override
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
index 89702118b49..bd26319f4ab 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
@@ -36,6 +36,7 @@ import java.net.UnknownHostException;
 import static org.hamcrest.Matchers.containsString;
 public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase {
+
 @Override
 protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) {
 settings = Settings.builder().put(settings).put("transport.tcp.port", "0").build();
@@ -53,4 +54,4 @@ public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase {
 assertThat(e.getMessage(), containsString("[localhost/127.0.0.1:9876]"));
 }
 }
-} \ No newline at end of file
+}
diff --git a/docs/reference/tasks/list.asciidoc b/docs/reference/tasks/list.asciidoc
new file mode 100644
index 00000000000..bfd7f12c43f
--- /dev/null
+++ b/docs/reference/tasks/list.asciidoc
@@ -0,0 +1,46 @@
+[[tasks-list]]
+== Tasks List
+
+The task management API allows you to retrieve information about currently running tasks.
+
+[source,js]
+--------------------------------------------------
+curl -XGET 'http://localhost:9200/_tasks'
+curl -XGET 'http://localhost:9200/_tasks/nodeId1,nodeId2'
+curl -XGET 'http://localhost:9200/_tasks/nodeId1,nodeId2/cluster:*'
+--------------------------------------------------
+
+The first command retrieves all tasks currently running on all nodes.
+The second command selectively retrieves tasks from nodes
+`nodeId1` and `nodeId2`. All the node-selection options are explained
+<>.
+The third command retrieves all cluster-related tasks running on nodes `nodeId1` and `nodeId2`.
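+
+The parameters defined in the accompanying `tasks.list` REST spec can be
+combined with the commands above. As a sketch, assuming the REST layer
+exposes the `detailed`, `parent_node` and `parent_task` parameters exactly
+as the spec describes them, the following hypothetical requests would
+return detailed task descriptions and the children of task `105`
+respectively:
+
+[source,js]
+--------------------------------------------------
+curl -XGET 'http://localhost:9200/_tasks?detailed=true'
+curl -XGET 'http://localhost:9200/_tasks?parent_node=nodeId1&parent_task=105'
+--------------------------------------------------
+
+The response format is the same in all cases.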
+ +The result will look similar to: + +[source,js] +-------------------------------------------------- +{ + "nodes" : { + "fDlEl7PrQi6F-awHZ3aaDw" : { + "name" : "Gazer", + "transport_address" : "127.0.0.1:9300", + "host" : "127.0.0.1", + "ip" : "127.0.0.1:9300", + "tasks" : [ { + "node" : "fDlEl7PrQi6F-awHZ3aaDw", + "id" : 105, + "type" : "transport", + "action" : "cluster:monitor/nodes/tasks" + }, { + "node" : "fDlEl7PrQi6F-awHZ3aaDw", + "id" : 106, + "type" : "direct", + "action" : "cluster:monitor/nodes/tasks[n]", + "parent_node" : "fDlEl7PrQi6F-awHZ3aaDw", + "parent_id" : 105 + } ] + } + } +} +-------------------------------------------------- diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 516514599ae..9a3a4632c6f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -92,6 +92,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.action.SearchServiceTransportAction; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java index e3faeb1badc..fa83fb4fd3d 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java @@ -239,6 +239,6 @@ public class DeleteByQueryRequest extends ActionRequest im ", size:" + size + ", timeout:" + timeout + ", routing:" + routing + - ", query:" + query.toString(); + ", query:" + query; } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json new file mode 100644 index 00000000000..02acf10d1f7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -0,0 +1,35 @@ +{ + "tasks.list": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-list.html", + "methods": ["GET"], + "url": { + "path": "/_tasks", + "paths": ["/_tasks", "/_tasks/{node_id}", "/_tasks/{node_id}/{actions}"], + "parts": { + "node_id": { + "type": "list", + "description": "A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes" + }, + "actions": { + "type": "list", + "description": "A comma-separated list of actions that should be returned. Leave empty to return all." + } + }, + "params": { + "detailed": { + "type": "boolean", + "description": "Return detailed task information (default: false)" + }, + "parent_node": { + "type": "string", + "description": "Return tasks with specified parent node." + }, + "parent_task": { + "type" : "number", + "description" : "Return tasks with specified parent task id. Set to -1 to return all." 
+ } + } + }, + "body": null + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml new file mode 100644 index 00000000000..252649abbb6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml @@ -0,0 +1,6 @@ +--- +"tasks_list test": + - do: + tasks.list: {} + + - is_true: nodes diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java index 06def24d6f3..a19d19dcf47 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.TaskManager; import java.util.List; @@ -147,6 +148,11 @@ public class NoopClusterService implements ClusterService { return TimeValue.timeValueMillis(0); } + @Override + public TaskManager getTaskManager() { + return null; + } + @Override public Lifecycle.State lifecycleState() { return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index 93327213bbc..92b5f9a584b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; @@ -60,6 +61,7 @@ import java.util.concurrent.ScheduledFuture; public class TestClusterService implements ClusterService { volatile ClusterState state; + private volatile TaskManager taskManager; private final List listeners = new CopyOnWriteArrayList<>(); private final Queue onGoingTimeouts = ConcurrentCollections.newQueue(); private final ThreadPool threadPool; @@ -72,6 +74,7 @@ public class TestClusterService implements ClusterService { public TestClusterService(ThreadPool threadPool) { this(ClusterState.builder(new ClusterName("test")).build(), threadPool); + taskManager = new TaskManager(Settings.EMPTY); } public TestClusterService(ClusterState state) { @@ -230,6 +233,11 @@ public class TestClusterService implements ClusterService { throw new UnsupportedOperationException(); } + @Override + public TaskManager getTaskManager() { + return taskManager; + } + @Override public List pendingTasks() { throw new UnsupportedOperationException(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index d636341e42f..0a8869b20cf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ 
From 9ca4386fba11131f98db0336853b4cd7d80b0143 Mon Sep 17 00:00:00 2001
From: Igor Motov
Date: Tue, 5 Jan 2016 15:15:02 -0500
Subject: [PATCH 321/322] Lower logging level for registering/unregistering tasks with the task manager

At the debug level the task manager is too talkative.
---
 core/src/main/java/org/elasticsearch/tasks/TaskManager.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
index a4bf118f024..68e2dcbe9a5 100644
--- a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
+++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
@@ -49,8 +49,8 @@ public class TaskManager extends AbstractComponent {
     public Task register(String type, String action, TransportRequest request) {
         Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action);
         if (task != null) {
-            if (logger.isDebugEnabled()) {
-                logger.debug("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription());
+            if (logger.isTraceEnabled()) {
+                logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription());
             }
             Task previousTask = tasks.put(task.getId(), task);
             assert previousTask == null;
@@ -62,7 +62,7 @@ public class TaskManager extends AbstractComponent {
     /**
      * Unregister the task
      */
     public void unregister(Task task) {
-        logger.debug("unregister task for id: {}", task.getId());
+        logger.trace("unregister task for id: {}", task.getId());
         tasks.remove(task.getId());
     }

From 05c46c9d3568ed7491e91fdc7c8463755b62aac3 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 5 Jan 2016 14:30:06 -0500
Subject: [PATCH 322/322] Safe cluster state task notifications

This commit addresses an issue where a cluster state task listener
throwing an exception could prevent other listeners from being notified,
and could prevent the executor from receiving notifications that a new
cluster state was published. Additionally, this commit also addresses a
similar issue for executors handling cluster state publication
notifications.
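
The core of the fix is wrapping each listener so the cluster state update thread is shielded from throwing callbacks. A minimal sketch of that guarding pattern follows, assuming the ESLogger API of this codebase; the SafeClusterStateTaskListener added by the commit below is more complete than this illustration:

[source,java]
--------------------------------------------------
// Simplified sketch of the exception-isolation idea: every delegated
// callback is guarded so a throwing listener cannot take down the cluster
// state update thread or starve other listeners of their notifications.
private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
    private final ClusterStateTaskListener listener;
    private final ESLogger logger;

    SafeClusterStateTaskListener(ClusterStateTaskListener listener, ESLogger logger) {
        this.listener = listener;
        this.logger = logger;
    }

    @Override
    public void onFailure(String source, Throwable t) {
        try {
            listener.onFailure(source, t);
        } catch (Exception inner) {
            // log and swallow so one broken listener cannot disturb the rest
            logger.error("exception thrown by listener notifying of failure [{}]", inner, source);
        }
    }

    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        try {
            listener.clusterStateProcessed(source, oldState, newState);
        } catch (Exception e) {
            logger.error("exception thrown by listener while notifying of new cluster state [{}]", e, source);
        }
    }
}
--------------------------------------------------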
---
 .../service/InternalClusterService.java | 96 +++++++++++++++++++
 .../cluster/ClusterServiceIT.java       | 53 ++++++++++
 2 files changed, 149 insertions(+)

diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
index 5e945d372fe..ca135728b87 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
@@ -313,6 +313,13 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterService> implements ClusterService {
                                          final ClusterStateTaskExecutor<T> executor, final ClusterStateTaskListener listener) {
+        innerSubmitStateUpdateTask(source, task, config, executor, safe(listener, logger));
+    }
+
+    private <T> void innerSubmitStateUpdateTask(final String source, final T task,
+                                                final ClusterStateTaskConfig config,
+                                                final ClusterStateTaskExecutor<T> executor,
+                                                final SafeClusterStateTaskListener listener) {
         if (!lifecycle.started()) {
             return;
         }
@@ -640,6 +647,95 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterService> implements ClusterService {
     class UpdateTask<T> extends SourcePrioritizedRunnable {
         public final T task;
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
index d9096f5fd0d..6e7e338d8b9 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
@@ -743,6 +743,59 @@ public class ClusterServiceIT extends ESIntegTestCase {
         }
     }
 
+    /*
+     * test that a listener throwing an exception while handling a
+     * notification does not prevent publication notification to the
+     * executor
+     */
+    public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException {
+        Settings settings = settingsBuilder()
+                .put("discovery.type", "local")
+                .build();
+        internalCluster().startNode(settings);
+        ClusterService clusterService = internalCluster().getInstance(ClusterService.class);
+
+        final CountDownLatch latch = new CountDownLatch(1);
+        AtomicBoolean published = new AtomicBoolean();
+
+        clusterService.submitStateUpdateTask(
+                "testClusterStateTaskListenerThrowingExceptionIsOkay",
+                new Object(),
+                ClusterStateTaskConfig.build(Priority.NORMAL),
+                new ClusterStateTaskExecutor<Object>() {
+                    @Override
+                    public boolean runOnlyOnMaster() {
+                        return false;
+                    }
+
+                    @Override
+                    public BatchResult<Object> execute(ClusterState currentState, List<Object> tasks) throws Exception {
+                        ClusterState newClusterState = ClusterState.builder(currentState).build();
+                        return BatchResult.builder().successes(tasks).build(newClusterState);
+                    }
+
+                    @Override
+                    public void clusterStatePublished(ClusterState newClusterState) {
+                        published.set(true);
+                        latch.countDown();
+                    }
+                },
+                new ClusterStateTaskListener() {
+                    @Override
+                    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
+                        throw new IllegalStateException(source);
+                    }
+
+                    @Override
+                    public void onFailure(String source, Throwable t) {
+                    }
+                }
+        );
+
+        latch.await();
+        assertTrue(published.get());
+    }
+
     public void testClusterStateBatchedUpdates() throws InterruptedException {
         Settings settings = settingsBuilder()
                 .put("discovery.type", "local")