Remove data stream feature flag (#59572)

so that data streams can be used in the next minor release (7.9.0).

Backport of #59504 to 7.x branch.
Closes #53100
Martijn van Groningen 2020-07-14 23:50:41 +02:00 committed by GitHub
parent 3b688bfee5
commit 35ae3d19db
10 changed files with 13 additions and 78 deletions
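For context, the flag removed here followed the common snapshot-gated system property pattern. Below is a minimal, standalone sketch reconstructed from the static initializer deleted from ActionModule further down; the class name DataStreamsFeatureFlag is illustrative only, and the Build.CURRENT.isSnapshot() short-circuit in the real code is reduced to a comment so the sketch compiles without Elasticsearch on the classpath.

// Simplified reconstruction of the feature flag this commit removes.
// In ActionModule the flag was additionally forced to true on snapshot builds via
// Build.CURRENT.isSnapshot(); that dependency is omitted so the sketch stands alone.
public final class DataStreamsFeatureFlag {

    public static final boolean ENABLED;

    static {
        final String property = System.getProperty("es.datastreams_feature_enabled");
        if ("true".equals(property)) {
            ENABLED = true;
        } else if (property == null || "false".equals(property)) {
            ENABLED = false;
        } else {
            throw new IllegalArgumentException(
                "expected es.datastreams_feature_enabled to be unset or [true|false] but was [" + property + "]");
        }
    }

    private DataStreamsFeatureFlag() {}
}

Release builds therefore had to opt in with -Des.datastreams_feature_enabled=true, which is exactly the systemProperty wiring deleted from the Gradle files below; with the flag gone, none of that is needed.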

View File

@@ -484,17 +484,6 @@ allprojects {
}
}
// TODO: remove this once 7.7 is released and the 7.x branch is 7.8
subprojects {
pluginManager.withPlugin('elasticsearch.testclusters') {
testClusters.all {
if (org.elasticsearch.gradle.info.BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.datastreams_feature_enabled', 'true'
}
}
}
}
subprojects {
project.ext.disableTasks = { String... tasknames ->
for (String taskname : tasknames) {

View File

@@ -122,7 +122,7 @@ dependencies {
// repackaged jna with native bits linked against all elastic supported platforms
api "org.elasticsearch:jna:${versions.jna}"
testImplementation(project(":test:framework")) {
// tests use the locally compiled version of server
exclude group: 'org.elasticsearch', module: 'server'
@@ -333,12 +333,6 @@ licenseHeaders {
excludes << 'org/elasticsearch/common/inject/**/*'
}
tasks.named('internalClusterTest').configure {
if (org.elasticsearch.gradle.info.BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.datastreams_feature_enabled', 'true'
}
}
licenseHeaders {
excludes << 'org/elasticsearch/client/documentation/placeholder.txt'
}

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.action;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Build;
import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction;
import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction;
import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction;
@@ -416,21 +415,6 @@ public class ActionModule extends AbstractModule {
private static final Logger logger = LogManager.getLogger(ActionModule.class);
private final boolean transportClient;
public static final boolean DATASTREAMS_FEATURE_ENABLED;
static {
final String property = System.getProperty("es.datastreams_feature_enabled");
if (Build.CURRENT.isSnapshot() || "true".equals(property)) {
DATASTREAMS_FEATURE_ENABLED = true;
} else if ("false".equals(property) || property == null) {
DATASTREAMS_FEATURE_ENABLED = false;
} else {
throw new IllegalArgumentException(
"expected es.datastreams_feature_enabled to be unset or [true|false] but was [" + property + "]"
);
}
}
private final Settings settings;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final IndexScopedSettings indexScopedSettings;
@@ -627,13 +611,11 @@ public class ActionModule extends AbstractModule {
actionPlugins.stream().flatMap(p -> p.getActions().stream()).forEach(actions::register);
// Data streams:
if (DATASTREAMS_FEATURE_ENABLED) {
actions.register(CreateDataStreamAction.INSTANCE, CreateDataStreamAction.TransportAction.class);
actions.register(DeleteDataStreamAction.INSTANCE, DeleteDataStreamAction.TransportAction.class);
actions.register(GetDataStreamAction.INSTANCE, GetDataStreamAction.TransportAction.class);
actions.register(ResolveIndexAction.INSTANCE, ResolveIndexAction.TransportAction.class);
actions.register(DataStreamsStatsAction.INSTANCE, DataStreamsStatsAction.TransportAction.class);
}
actions.register(CreateDataStreamAction.INSTANCE, CreateDataStreamAction.TransportAction.class);
actions.register(DeleteDataStreamAction.INSTANCE, DeleteDataStreamAction.TransportAction.class);
actions.register(GetDataStreamAction.INSTANCE, GetDataStreamAction.TransportAction.class);
actions.register(ResolveIndexAction.INSTANCE, ResolveIndexAction.TransportAction.class);
actions.register(DataStreamsStatsAction.INSTANCE, DataStreamsStatsAction.TransportAction.class);
// Persistent tasks:
actions.register(StartPersistentTaskAction.INSTANCE, StartPersistentTaskAction.TransportAction.class);
@@ -793,13 +775,11 @@ public class ActionModule extends AbstractModule {
registerHandler.accept(new RestDeleteDanglingIndexAction());
// Data Stream API
if (DATASTREAMS_FEATURE_ENABLED) {
registerHandler.accept(new RestCreateDataStreamAction());
registerHandler.accept(new RestDeleteDataStreamAction());
registerHandler.accept(new RestGetDataStreamsAction());
registerHandler.accept(new RestResolveIndexAction());
registerHandler.accept(new RestDataStreamsStatsAction());
}
registerHandler.accept(new RestCreateDataStreamAction());
registerHandler.accept(new RestDeleteDataStreamAction());
registerHandler.accept(new RestGetDataStreamsAction());
registerHandler.accept(new RestResolveIndexAction());
registerHandler.accept(new RestDataStreamsStatsAction());
// CAT API
registerHandler.accept(new RestAllocationAction());
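With the guard gone, the data stream REST handlers registered above are always available, on release builds as well as snapshots. A minimal sketch of exercising those endpoints through the low-level REST client, assuming a node on localhost:9200 and a composable index template that already matches the illustrative stream name logs-foo:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class DataStreamEndpointsExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Served by RestCreateDataStreamAction; requires a matching index template.
            client.performRequest(new Request("PUT", "/_data_stream/logs-foo"));
            // Served by RestGetDataStreamsAction.
            Response response = client.performRequest(new Request("GET", "/_data_stream/logs-foo"));
            System.out.println(response.getStatusLine());
            // Served by RestDeleteDataStreamAction.
            client.performRequest(new Request("DELETE", "/_data_stream/logs-foo"));
        }
    }
}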

View File

@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.datastream.DeleteDataStreamAction;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
@@ -141,7 +140,7 @@ public abstract class TestCluster implements Closeable {
*/
public void wipeAllDataStreams() {
// Feature flag may not be enabled in all gradle modules that use ESIntegTestCase
if (size() > 0 && ActionModule.DATASTREAMS_FEATURE_ENABLED) {
if (size() > 0) {
AcknowledgedResponse response =
client().admin().indices().deleteDataStream(new DeleteDataStreamAction.Request(new String[]{"*"})).actionGet();
assertAcked(response);

View File

@@ -1,5 +1,3 @@
import org.elasticsearch.gradle.info.BuildParams
evaluationDependsOn(xpackModule('core'))
apply plugin: 'elasticsearch.esplugin'
@@ -13,17 +11,6 @@ esplugin {
archivesBaseName = 'x-pack-data-streams'
integTest.enabled = false
tasks.named('internalClusterTest').configure {
if (BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.datastreams_feature_enabled', 'true'
}
}
tasks.named('test').configure {
if (org.elasticsearch.gradle.info.BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.datastreams_feature_enabled', 'true'
}
}
dependencies {
compileOnly project(path: xpackModule('core'), configuration: 'default')
testImplementation project(path: xpackModule('core'), configuration: 'testArtifacts')

View File

@@ -21,8 +21,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.ActionModule.DATASTREAMS_FEATURE_ENABLED;
public class DataStreamsPlugin extends Plugin implements ActionPlugin, MapperPlugin {
private final boolean transportClientMode;
@@ -33,11 +31,7 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin, MapperPlugin {
@Override
public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
if (DATASTREAMS_FEATURE_ENABLED) {
return Collections.singletonMap(DataStreamTimestampFieldMapper.NAME, new DataStreamTimestampFieldMapper.TypeParser());
} else {
return Collections.emptyMap();
}
return Collections.singletonMap(DataStreamTimestampFieldMapper.NAME, new DataStreamTimestampFieldMapper.TypeParser());
}
public Collection<Module> createGuiceModules() {

View File

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.integration;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
@@ -147,7 +146,6 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
}
public void testWithDatastreams() throws Exception {
assumeTrue("should only run if data streams are enabled", ActionModule.DATASTREAMS_FEATURE_ENABLED);
initialize("classification_with_datastreams", true);
String predictedClassField = KEYWORD_FIELD + "_prediction";
indexData(sourceIndex, 300, 50, KEYWORD_FIELD);

View File

@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
@@ -103,7 +102,6 @@ public class DatafeedJobsIT extends MlNativeAutodetectIntegTestCase {
}
public void testLookbackOnlyDataStream() throws Exception {
assumeTrue("should only run if data streams are enabled", ActionModule.DATASTREAMS_FEATURE_ENABLED);
String mapping = "{\n" +
" \"properties\": {\n" +
" \"time\": {\n" +

View File

@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
@@ -399,7 +398,6 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase {
}
public void testWithDatastream() throws Exception {
assumeTrue("should only run if data streams are enabled", ActionModule.DATASTREAMS_FEATURE_ENABLED);
initialize("regression_with_datastream");
String predictedClassField = DEPENDENT_VARIABLE_FIELD + "_prediction";
indexData(sourceIndex, 300, 50, true);

View File

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.transform.integration;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
@@ -97,7 +96,6 @@ public class TransformPivotRestIT extends TransformRestTestCase {
}
public void testSimpleDataStreamPivot() throws Exception {
assumeTrue("should only run if data streams are enabled", ActionModule.DATASTREAMS_FEATURE_ENABLED);
String indexName = "reviews_data_stream";
createReviewsIndex(indexName, 1000, "date", true);
String transformId = "simple_data_stream_pivot";