[Transform] Rename data frame plugin to transform: classes in xpack.core (#46644) (#46734)

Rename classes in xpack.core of the transform plugin from "data frame transform" to "transform".
This commit is contained in:
Hendrik Muhs 2019-09-16 13:39:22 +02:00 committed by GitHub
parent 16bed7f017
commit c8f52ec4ff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
145 changed files with 1861 additions and 1861 deletions

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerPosition;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -30,23 +30,23 @@ import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameIndexerPositionTests extends AbstractResponseTestCase<
DataFrameIndexerPosition,
TransformIndexerPosition,
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition> {
public static DataFrameIndexerPosition fromHlrc(
public static TransformIndexerPosition fromHlrc(
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition instance) {
if (instance == null) {
return null;
}
return new DataFrameIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition());
return new TransformIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition());
}
public static DataFrameIndexerPosition randomDataFrameIndexerPosition() {
return new DataFrameIndexerPosition(randomPositionMap(), randomPositionMap());
public static TransformIndexerPosition randomDataFrameIndexerPosition() {
return new TransformIndexerPosition(randomPositionMap(), randomPositionMap());
}
@Override
protected DataFrameIndexerPosition createServerTestInstance(XContentType xContentType) {
protected TransformIndexerPosition createServerTestInstance(XContentType xContentType) {
return randomDataFrameIndexerPosition();
}
@@ -56,7 +56,7 @@ public class DataFrameIndexerPositionTests extends AbstractResponseTestCase<
}
@Override
protected void assertInstances(DataFrameIndexerPosition serverTestInstance,
protected void assertInstances(TransformIndexerPosition serverTestInstance,
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition clientInstance) {
assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition()));
assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition()));

View File

@@ -21,17 +21,17 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractHlrcXContentTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import java.io.IOException;
public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTestCase<
DataFrameIndexerTransformStats,
TransformIndexerStats,
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats> {
public static DataFrameIndexerTransformStats fromHlrc(
public static TransformIndexerStats fromHlrc(
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) {
return new DataFrameIndexerTransformStats(
return new TransformIndexerStats(
instance.getNumPages(),
instance.getNumDocuments(),
instance.getOutputDocuments(),
@@ -54,13 +54,13 @@ public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTes
}
@Override
public DataFrameIndexerTransformStats convertHlrcToInternal(
public TransformIndexerStats convertHlrcToInternal(
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) {
return fromHlrc(instance);
}
public static DataFrameIndexerTransformStats randomStats() {
return new DataFrameIndexerTransformStats(randomLongBetween(10L, 10000L),
public static TransformIndexerStats randomStats() {
return new TransformIndexerStats(randomLongBetween(10L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
@@ -70,13 +70,13 @@ public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTes
}
@Override
protected DataFrameIndexerTransformStats createTestInstance() {
protected TransformIndexerStats createTestInstance() {
return randomStats();
}
@Override
protected DataFrameIndexerTransformStats doParseInstance(XContentParser parser) throws IOException {
return DataFrameIndexerTransformStats.fromXContent(parser);
protected TransformIndexerStats doParseInstance(XContentParser parser) throws IOException {
return TransformIndexerStats.fromXContent(parser);
}
@Override

View File

@@ -22,34 +22,34 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformCheckpointStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformCheckpointStatsTests extends AbstractResponseTestCase<
DataFrameTransformCheckpointStats,
TransformCheckpointStats,
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats> {
public static DataFrameTransformCheckpointStats fromHlrc(
public static TransformCheckpointStats fromHlrc(
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats instance) {
return new DataFrameTransformCheckpointStats(instance.getCheckpoint(),
return new TransformCheckpointStats(instance.getCheckpoint(),
DataFrameIndexerPositionTests.fromHlrc(instance.getPosition()),
DataFrameTransformProgressTests.fromHlrc(instance.getCheckpointProgress()),
instance.getTimestampMillis(),
instance.getTimeUpperBoundMillis());
}
public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000),
public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
}
@Override
protected DataFrameTransformCheckpointStats createServerTestInstance(XContentType xContentType) {
protected TransformCheckpointStats createServerTestInstance(XContentType xContentType) {
return randomDataFrameTransformCheckpointStats();
}
@@ -60,7 +60,7 @@ public class DataFrameTransformCheckpointStatsTests extends AbstractResponseTest
}
@Override
protected void assertInstances(DataFrameTransformCheckpointStats serverTestInstance,
protected void assertInstances(TransformCheckpointStats serverTestInstance,
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats clientInstance) {
assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint()));
assertThat(serverTestInstance.getPosition().getBucketsPosition(), equalTo(clientInstance.getPosition().getBucketsPosition()));

View File

@@ -21,19 +21,19 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractHlrcXContentTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformCheckpointingInfo;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo;
import java.io.IOException;
import java.time.Instant;
import java.util.function.Predicate;
public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase<
DataFrameTransformCheckpointingInfo,
TransformCheckpointingInfo,
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo> {
public static DataFrameTransformCheckpointingInfo fromHlrc(
public static TransformCheckpointingInfo fromHlrc(
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) {
return new DataFrameTransformCheckpointingInfo(
return new TransformCheckpointingInfo(
DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getLast()),
DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getNext()),
instance.getOperationsBehind(),
@@ -46,13 +46,13 @@ public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXConte
}
@Override
public DataFrameTransformCheckpointingInfo convertHlrcToInternal(
public TransformCheckpointingInfo convertHlrcToInternal(
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) {
return fromHlrc(instance);
}
public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new DataFrameTransformCheckpointingInfo(
public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new TransformCheckpointingInfo(
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
randomNonNegativeLong(),
@@ -60,13 +60,13 @@ public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXConte
}
@Override
protected DataFrameTransformCheckpointingInfo createTestInstance() {
protected TransformCheckpointingInfo createTestInstance() {
return randomDataFrameTransformCheckpointingInfo();
}
@Override
protected DataFrameTransformCheckpointingInfo doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformCheckpointingInfo.fromXContent(parser);
protected TransformCheckpointingInfo doParseInstance(XContentParser parser) throws IOException {
return TransformCheckpointingInfo.fromXContent(parser);
}
@Override

View File

@@ -22,29 +22,29 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
DataFrameTransformProgress,
TransformProgress,
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress> {
public static DataFrameTransformProgress fromHlrc(
public static TransformProgress fromHlrc(
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress instance) {
if (instance == null) {
return null;
}
return new DataFrameTransformProgress(instance.getTotalDocs(),
return new TransformProgress(instance.getTotalDocs(),
instance.getRemainingDocs(),
instance.getDocumentsProcessed(),
instance.getDocumentsIndexed());
}
public static DataFrameTransformProgress randomDataFrameTransformProgress() {
public static TransformProgress randomDataFrameTransformProgress() {
Long totalDocs = randomBoolean() ? null : randomNonNegativeLong();
Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
return new DataFrameTransformProgress(
return new TransformProgress(
totalDocs,
docsRemaining,
totalDocs != null ? totalDocs - docsRemaining : randomNonNegativeLong(),
@@ -52,7 +52,7 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
}
@Override
protected DataFrameTransformProgress createServerTestInstance(XContentType xContentType) {
protected TransformProgress createServerTestInstance(XContentType xContentType) {
return randomDataFrameTransformProgress();
}
@@ -62,7 +62,7 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
}
@Override
protected void assertInstances(DataFrameTransformProgress serverTestInstance,
protected void assertInstances(TransformProgress serverTestInstance,
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress clientInstance) {
assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs()));
assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed()));

View File

@@ -21,11 +21,11 @@ package org.elasticsearch.client.transform.transforms.hlrc;
import org.elasticsearch.client.AbstractHlrcXContentTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformCheckpointStats;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformCheckpointingInfo;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;
import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes;
import java.io.IOException;
@@ -34,7 +34,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;
public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<DataFrameTransformStats,
public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<TransformStats,
org.elasticsearch.client.transform.transforms.DataFrameTransformStats> {
public static NodeAttributes fromHlrc(org.elasticsearch.client.transform.transforms.NodeAttributes attributes) {
@@ -45,11 +45,11 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<D
attributes.getAttributes());
}
public static DataFrameTransformStats
public static TransformStats
fromHlrc(org.elasticsearch.client.transform.transforms.DataFrameTransformStats instance) {
return new DataFrameTransformStats(instance.getId(),
DataFrameTransformStats.State.fromString(instance.getState().value()),
return new TransformStats(instance.getId(),
TransformStats.State.fromString(instance.getState().value()),
instance.getReason(),
fromHlrc(instance.getNode()),
DataFrameIndexerTransformStatsTests.fromHlrc(instance.getIndexerStats()),
@@ -63,19 +63,19 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<D
}
@Override
public DataFrameTransformStats convertHlrcToInternal(
public TransformStats convertHlrcToInternal(
org.elasticsearch.client.transform.transforms.DataFrameTransformStats instance) {
return new DataFrameTransformStats(instance.getId(),
DataFrameTransformStats.State.fromString(instance.getState().value()),
return new TransformStats(instance.getId(),
TransformStats.State.fromString(instance.getState().value()),
instance.getReason(),
fromHlrc(instance.getNode()),
DataFrameIndexerTransformStatsTests.fromHlrc(instance.getIndexerStats()),
DataFrameTransformCheckpointingInfoTests.fromHlrc(instance.getCheckpointingInfo()));
}
public static DataFrameTransformStats randomDataFrameTransformStats() {
return new DataFrameTransformStats(randomAlphaOfLength(10),
randomFrom(DataFrameTransformStats.State.values()),
public static TransformStats randomDataFrameTransformStats() {
return new TransformStats(randomAlphaOfLength(10),
randomFrom(TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : randomNodeAttributes(),
randomStats(),
@@ -83,13 +83,13 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<D
}
@Override
protected DataFrameTransformStats createTestInstance() {
protected TransformStats createTestInstance() {
return randomDataFrameTransformStats();
}
@Override
protected DataFrameTransformStats doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformStats.PARSER.apply(parser, null);
protected TransformStats doParseInstance(XContentParser parser) throws IOException {
return TransformStats.PARSER.apply(parser, null);
}
@Override
@@ -97,24 +97,24 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<D
return field -> field.contains("position") || field.equals("node.attributes");
}
public static DataFrameTransformProgress randomDataFrameTransformProgress() {
public static TransformProgress randomDataFrameTransformProgress() {
Long totalDocs = randomBoolean() ? null : randomNonNegativeLong();
Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
return new DataFrameTransformProgress(
return new TransformProgress(
totalDocs,
docsRemaining,
totalDocs != null ? totalDocs - docsRemaining : randomNonNegativeLong(),
randomBoolean() ? null : randomNonNegativeLong());
}
public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new DataFrameTransformCheckpointingInfo(randomDataFrameTransformCheckpointStats(),
public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new TransformCheckpointingInfo(randomDataFrameTransformCheckpointStats(),
randomDataFrameTransformCheckpointStats(), randomNonNegativeLong(),
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
}
public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000),
public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
@@ -134,8 +134,8 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<D
attributes);
}
public static DataFrameIndexerTransformStats randomStats() {
return new DataFrameIndexerTransformStats(randomLongBetween(10L, 10000L),
public static TransformIndexerStats randomStats() {
return new TransformIndexerStats(randomLongBetween(10L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),

View File

@@ -67,10 +67,6 @@ Example response:
"available" : true,
"enabled" : true
},
"data_frame" : {
"available" : true,
"enabled" : true
},
"analytics" : {
"available" : true,
"enabled" : true
@@ -123,6 +119,10 @@ Example response:
"available" : true,
"enabled" : true
},
"transform" : {
"available" : true,
"enabled" : true
},
"vectors" : {
"available" : true,
"enabled" : true

View File

@@ -201,18 +201,18 @@ import org.elasticsearch.xpack.core.spatial.SpatialFeatureSetUsage;
import org.elasticsearch.xpack.core.sql.SqlFeatureSetUsage;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction;
import org.elasticsearch.xpack.core.transform.DataFrameFeatureSetUsage;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformTaskAction;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransform;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformState;
import org.elasticsearch.xpack.core.transform.TransformFeatureSetUsage;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartTransformTaskAction;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction;
import org.elasticsearch.xpack.core.transform.transforms.Transform;
import org.elasticsearch.xpack.core.transform.transforms.TransformState;
import org.elasticsearch.xpack.core.transform.transforms.SyncConfig;
import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction;
@@ -422,14 +422,14 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
// Freeze
FreezeIndexAction.INSTANCE,
// Data Frame
PutDataFrameTransformAction.INSTANCE,
StartDataFrameTransformAction.INSTANCE,
StartDataFrameTransformTaskAction.INSTANCE,
StopDataFrameTransformAction.INSTANCE,
DeleteDataFrameTransformAction.INSTANCE,
GetDataFrameTransformsAction.INSTANCE,
GetDataFrameTransformsStatsAction.INSTANCE,
PreviewDataFrameTransformAction.INSTANCE
PutTransformAction.INSTANCE,
StartTransformAction.INSTANCE,
StartTransformTaskAction.INSTANCE,
StopTransformAction.INSTANCE,
DeleteTransformAction.INSTANCE,
GetTransformsAction.INSTANCE,
GetTransformsStatsAction.INSTANCE,
PreviewTransformAction.INSTANCE
);
}
@@ -534,11 +534,11 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new),
// Data Frame
new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_FRAME, DataFrameFeatureSetUsage::new),
new NamedWriteableRegistry.Entry(PersistentTaskParams.class, DataFrameField.TASK_NAME, DataFrameTransform::new),
new NamedWriteableRegistry.Entry(Task.Status.class, DataFrameField.TASK_NAME, DataFrameTransformState::new),
new NamedWriteableRegistry.Entry(PersistentTaskState.class, DataFrameField.TASK_NAME, DataFrameTransformState::new),
new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(), TimeSyncConfig::new),
new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.Transform, TransformFeatureSetUsage::new),
new NamedWriteableRegistry.Entry(PersistentTaskParams.class, TransformField.TASK_NAME, Transform::new),
new NamedWriteableRegistry.Entry(Task.Status.class, TransformField.TASK_NAME, TransformState::new),
new NamedWriteableRegistry.Entry(PersistentTaskState.class, TransformField.TASK_NAME, TransformState::new),
new NamedWriteableRegistry.Entry(SyncConfig.class, TransformField.TIME_BASED_SYNC.getPreferredName(), TimeSyncConfig::new),
new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.FLATTENED, FlattenedFeatureSetUsage::new),
// Vectors
new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.VECTORS, VectorsFeatureSetUsage::new),
@@ -585,12 +585,12 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(RollupJobStatus.NAME),
RollupJobStatus::fromXContent),
// Data Frame
new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(DataFrameField.TASK_NAME),
DataFrameTransform::fromXContent),
new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(DataFrameField.TASK_NAME),
DataFrameTransformState::fromXContent),
new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(DataFrameField.TASK_NAME),
DataFrameTransformState::fromXContent)
new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(TransformField.TASK_NAME),
Transform::fromXContent),
new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(TransformField.TASK_NAME),
TransformState::fromXContent),
new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(TransformField.TASK_NAME),
TransformState::fromXContent)
);
}

View File

@@ -35,8 +35,8 @@ public final class XPackField {
public static final String INDEX_LIFECYCLE = "ilm";
/** Name constant for the CCR feature. */
public static final String CCR = "ccr";
/** Name constant for the data frame feature. */
public static final String DATA_FRAME = "data_frame";
/** Name constant for the transform feature. */
public static final String Transform = "transform";
/** Name constant for flattened fields. */
public static final String FLATTENED = "flattened";
/** Name constant for the vectors feature. */

View File

@@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.XPackFeatureSet.Usage;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.XPackField;
import java.io.IOException;
@@ -19,20 +19,20 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
public class DataFrameFeatureSetUsage extends Usage {
public class TransformFeatureSetUsage extends Usage {
private final Map<String, Long> transformCountByState;
private final DataFrameIndexerTransformStats accumulatedStats;
private final TransformIndexerStats accumulatedStats;
public DataFrameFeatureSetUsage(StreamInput in) throws IOException {
public TransformFeatureSetUsage(StreamInput in) throws IOException {
super(in);
this.transformCountByState = in.readMap(StreamInput::readString, StreamInput::readLong);
this.accumulatedStats = new DataFrameIndexerTransformStats(in);
this.accumulatedStats = new TransformIndexerStats(in);
}
public DataFrameFeatureSetUsage(boolean available, boolean enabled, Map<String, Long> transformCountByState,
DataFrameIndexerTransformStats accumulatedStats) {
super(XPackField.DATA_FRAME, available, enabled);
public TransformFeatureSetUsage(boolean available, boolean enabled, Map<String, Long> transformCountByState,
TransformIndexerStats accumulatedStats) {
super(XPackField.Transform, available, enabled);
this.transformCountByState = Objects.requireNonNull(transformCountByState);
this.accumulatedStats = Objects.requireNonNull(accumulatedStats);
}
@@ -48,7 +48,7 @@ public class DataFrameFeatureSetUsage extends Usage {
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
super.innerXContent(builder, params);
if (transformCountByState.isEmpty() == false) {
builder.startObject(DataFrameField.TRANSFORMS.getPreferredName());
builder.startObject(TransformField.TRANSFORMS.getPreferredName());
long all = 0L;
for (Entry<String, Long> entry : transformCountByState.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
@@ -58,7 +58,7 @@ public class DataFrameFeatureSetUsage extends Usage {
builder.endObject();
// if there are no transforms, do not show any stats
builder.field(DataFrameField.STATS_FIELD.getPreferredName(), accumulatedStats);
builder.field(TransformField.STATS_FIELD.getPreferredName(), accumulatedStats);
}
}
@@ -75,7 +75,7 @@ public class DataFrameFeatureSetUsage extends Usage {
if (getClass() != obj.getClass()) {
return false;
}
DataFrameFeatureSetUsage other = (DataFrameFeatureSetUsage) obj;
TransformFeatureSetUsage other = (TransformFeatureSetUsage) obj;
return Objects.equals(name, other.name) && available == other.available && enabled == other.enabled
&& Objects.equals(transformCountByState, other.transformCountByState)
&& Objects.equals(accumulatedStats, other.accumulatedStats);

View File

@@ -11,7 +11,7 @@ import org.elasticsearch.common.ParseField;
/*
* Utility class to hold common fields and strings for data frame.
*/
public final class DataFrameField {
public final class TransformField {
// common parse fields
public static final ParseField AGGREGATIONS = new ParseField("aggregations");
@@ -81,6 +81,6 @@ public final class DataFrameField {
// internal document id
public static String DOCUMENT_ID_FIELD = "_id";
private DataFrameField() {
private TransformField() {
}
}

View File

@@ -9,7 +9,7 @@ package org.elasticsearch.xpack.core.transform;
import java.text.MessageFormat;
import java.util.Locale;
public class DataFrameMessages {
public class TransformMessages {
public static final String REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT =
"Timed out after [{0}] while waiting for data frame transform [{1}] to stop";
@@ -83,7 +83,7 @@ public class DataFrameMessages {
public static final String ID_TOO_LONG = "The id cannot contain more than {0} characters.";
public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " +
"underscores; must start and end with alphanumeric";
private DataFrameMessages() {
private TransformMessages() {
}
/**

View File

@@ -14,13 +14,13 @@ import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
import java.util.Arrays;
import java.util.List;
public class DataFrameNamedXContentProvider implements NamedXContentProvider {
public class TransformNamedXContentProvider implements NamedXContentProvider {
@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
return Arrays.asList(
new NamedXContentRegistry.Entry(SyncConfig.class,
DataFrameField.TIME_BASED_SYNC,
TransformField.TIME_BASED_SYNC,
TimeSyncConfig::parse));
}
}

View File

@@ -12,18 +12,18 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
public class DeleteDataFrameTransformAction extends ActionType<AcknowledgedResponse> {
public class DeleteTransformAction extends ActionType<AcknowledgedResponse> {
public static final DeleteDataFrameTransformAction INSTANCE = new DeleteDataFrameTransformAction();
public static final DeleteTransformAction INSTANCE = new DeleteTransformAction();
public static final String NAME = "cluster:admin/data_frame/delete";
private DeleteDataFrameTransformAction() {
private DeleteTransformAction() {
super(NAME, AcknowledgedResponse::new);
}
@ -32,7 +32,7 @@ public class DeleteDataFrameTransformAction extends ActionType<AcknowledgedRespo
private final boolean force;
public Request(String id, boolean force) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName());
this.force = force;
}

View File

@ -19,8 +19,8 @@ import org.elasticsearch.xpack.core.action.AbstractGetResourcesRequest;
import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse;
import org.elasticsearch.xpack.core.action.util.PageParams;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.io.IOException;
import java.util.ArrayList;
@ -28,16 +28,16 @@ import java.util.List;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransformsAction.Response> {
public class GetTransformsAction extends ActionType<GetTransformsAction.Response> {
public static final GetDataFrameTransformsAction INSTANCE = new GetDataFrameTransformsAction();
public static final GetTransformsAction INSTANCE = new GetTransformsAction();
public static final String NAME = "cluster:monitor/data_frame/get";
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
LogManager.getLogger(GetDataFrameTransformsAction.class));
LogManager.getLogger(GetTransformsAction.class));
private GetDataFrameTransformsAction() {
super(NAME, GetDataFrameTransformsAction.Response::new);
private GetTransformsAction() {
super(NAME, GetTransformsAction.Response::new);
}
public static class Request extends AbstractGetResourcesRequest {
@ -72,17 +72,17 @@ public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransfo
@Override
public String getResourceIdField() {
return DataFrameField.ID.getPreferredName();
return TransformField.ID.getPreferredName();
}
}
public static class Response extends AbstractGetResourcesResponse<DataFrameTransformConfig> implements Writeable, ToXContentObject {
public static class Response extends AbstractGetResourcesResponse<TransformConfig> implements Writeable, ToXContentObject {
public static final String INVALID_TRANSFORMS_DEPRECATION_WARNING = "Found [{}] invalid transforms";
private static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
public Response(List<DataFrameTransformConfig> transformConfigs, long count) {
super(new QueryPage<>(transformConfigs, count, DataFrameField.TRANSFORMS));
public Response(List<TransformConfig> transformConfigs, long count) {
super(new QueryPage<>(transformConfigs, count, TransformField.TRANSFORMS));
}
public Response() {
@ -93,7 +93,7 @@ public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransfo
super(in);
}
public List<DataFrameTransformConfig> getTransformConfigurations() {
public List<TransformConfig> getTransformConfigurations() {
return getResources().results();
}
@ -101,11 +101,11 @@ public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransfo
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
List<String> invalidTransforms = new ArrayList<>();
builder.startObject();
builder.field(DataFrameField.COUNT.getPreferredName(), getResources().count());
builder.field(TransformField.COUNT.getPreferredName(), getResources().count());
// XContentBuilder does not support passing the params object for Iterables
builder.field(DataFrameField.TRANSFORMS.getPreferredName());
builder.field(TransformField.TRANSFORMS.getPreferredName());
builder.startArray();
for (DataFrameTransformConfig configResponse : getResources().results()) {
for (TransformConfig configResponse : getResources().results()) {
configResponse.toXContent(builder, params);
if (configResponse.isValid() == false) {
invalidTransforms.add(configResponse.getId());
@ -114,8 +114,8 @@ public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransfo
builder.endArray();
if (invalidTransforms.isEmpty() == false) {
builder.startObject(INVALID_TRANSFORMS.getPreferredName());
builder.field(DataFrameField.COUNT.getPreferredName(), invalidTransforms.size());
builder.field(DataFrameField.TRANSFORMS.getPreferredName(), invalidTransforms);
builder.field(TransformField.COUNT.getPreferredName(), invalidTransforms.size());
builder.field(TransformField.TRANSFORMS.getPreferredName(), invalidTransforms);
builder.endObject();
deprecationLogger.deprecated(INVALID_TRANSFORMS_DEPRECATION_WARNING, invalidTransforms.size());
}
@ -125,8 +125,8 @@ public class GetDataFrameTransformsAction extends ActionType<GetDataFrameTransfo
}
@Override
protected Reader<DataFrameTransformConfig> getReader() {
return DataFrameTransformConfig::new;
protected Reader<TransformConfig> getReader() {
return TransformConfig::new;
}
}
}

View File

@ -22,8 +22,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.core.action.util.PageParams;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStats;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
@ -34,12 +34,12 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class GetDataFrameTransformsStatsAction extends ActionType<GetDataFrameTransformsStatsAction.Response> {
public class GetTransformsStatsAction extends ActionType<GetTransformsStatsAction.Response> {
public static final GetDataFrameTransformsStatsAction INSTANCE = new GetDataFrameTransformsStatsAction();
public static final GetTransformsStatsAction INSTANCE = new GetTransformsStatsAction();
public static final String NAME = "cluster:monitor/data_frame/stats/get";
public GetDataFrameTransformsStatsAction() {
super(NAME, GetDataFrameTransformsStatsAction.Response::new);
public GetTransformsStatsAction() {
super(NAME, GetTransformsStatsAction.Response::new);
}
public static class Request extends BaseTasksRequest<Request> {
@ -74,7 +74,7 @@ public class GetDataFrameTransformsStatsAction extends ActionType<GetDataFrameTr
public boolean match(Task task) {
// Only get tasks that we have expanded to
return expandedIds.stream()
.anyMatch(transformId -> task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transformId));
.anyMatch(transformId -> task.getDescription().equals(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transformId));
}
public String getId() {
@ -147,24 +147,24 @@ public class GetDataFrameTransformsStatsAction extends ActionType<GetDataFrameTr
}
public static class Response extends BaseTasksResponse implements ToXContentObject {
private final QueryPage<DataFrameTransformStats> transformsStats;
private final QueryPage<TransformStats> transformsStats;
public Response(List<DataFrameTransformStats> transformStateAndStats, long count) {
this(new QueryPage<>(transformStateAndStats, count, DataFrameField.TRANSFORMS));
public Response(List<TransformStats> transformStateAndStats, long count) {
this(new QueryPage<>(transformStateAndStats, count, TransformField.TRANSFORMS));
}
public Response(List<DataFrameTransformStats> transformStateAndStats,
public Response(List<TransformStats> transformStateAndStats,
long count,
List<TaskOperationFailure> taskFailures,
List<? extends ElasticsearchException> nodeFailures) {
this(new QueryPage<>(transformStateAndStats, count, DataFrameField.TRANSFORMS), taskFailures, nodeFailures);
this(new QueryPage<>(transformStateAndStats, count, TransformField.TRANSFORMS), taskFailures, nodeFailures);
}
private Response(QueryPage<DataFrameTransformStats> transformsStats) {
private Response(QueryPage<TransformStats> transformsStats) {
this(transformsStats, Collections.emptyList(), Collections.emptyList());
}
private Response(QueryPage<DataFrameTransformStats> transformsStats,
private Response(QueryPage<TransformStats> transformsStats,
List<TaskOperationFailure> taskFailures,
List<? extends ElasticsearchException> nodeFailures) {
super(taskFailures, nodeFailures);
@ -174,14 +174,14 @@ public class GetDataFrameTransformsStatsAction extends ActionType<GetDataFrameTr
public Response(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
transformsStats = new QueryPage<>(in, DataFrameTransformStats::new);
transformsStats = new QueryPage<>(in, TransformStats::new);
} else {
List<DataFrameTransformStats> stats = in.readList(DataFrameTransformStats::new);
transformsStats = new QueryPage<>(stats, stats.size(), DataFrameField.TRANSFORMS);
List<TransformStats> stats = in.readList(TransformStats::new);
transformsStats = new QueryPage<>(stats, stats.size(), TransformField.TRANSFORMS);
}
}
public List<DataFrameTransformStats> getTransformsStats() {
public List<TransformStats> getTransformsStats() {
return transformsStats.results();
}

View File

@ -22,8 +22,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import java.io.IOException;
@ -36,26 +36,26 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class PreviewDataFrameTransformAction extends ActionType<PreviewDataFrameTransformAction.Response> {
public class PreviewTransformAction extends ActionType<PreviewTransformAction.Response> {
public static final PreviewDataFrameTransformAction INSTANCE = new PreviewDataFrameTransformAction();
public static final PreviewTransformAction INSTANCE = new PreviewTransformAction();
public static final String NAME = "cluster:admin/data_frame/preview";
private PreviewDataFrameTransformAction() {
super(NAME, PreviewDataFrameTransformAction.Response::new);
private PreviewTransformAction() {
super(NAME, PreviewTransformAction.Response::new);
}
public static class Request extends AcknowledgedRequest<Request> implements ToXContentObject {
private final DataFrameTransformConfig config;
private final TransformConfig config;
public Request(DataFrameTransformConfig config) {
public Request(TransformConfig config) {
this.config = config;
}
public Request(StreamInput in) throws IOException {
super(in);
this.config = new DataFrameTransformConfig(in);
this.config = new TransformConfig(in);
}
public static Request fromXContent(final XContentParser parser) throws IOException {
@ -64,7 +64,7 @@ public class PreviewDataFrameTransformAction extends ActionType<PreviewDataFrame
Map<String, String> tempDestination = new HashMap<>();
tempDestination.put(DestConfig.INDEX.getPreferredName(), "unused-transform-preview-index");
// Users can still provide just dest.pipeline to preview what their data would look like given the pipeline ID
Object providedDestination = content.get(DataFrameField.DESTINATION.getPreferredName());
Object providedDestination = content.get(TransformField.DESTINATION.getPreferredName());
if (providedDestination instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, String> destMap = (Map<String, String>)providedDestination;
@ -73,15 +73,15 @@ public class PreviewDataFrameTransformAction extends ActionType<PreviewDataFrame
tempDestination.put(DestConfig.PIPELINE.getPreferredName(), pipeline);
}
}
content.put(DataFrameField.DESTINATION.getPreferredName(), tempDestination);
content.put(DataFrameField.ID.getPreferredName(), "transform-preview");
content.put(TransformField.DESTINATION.getPreferredName(), tempDestination);
content.put(TransformField.ID.getPreferredName(), "transform-preview");
try(XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(content);
XContentParser newParser = XContentType.JSON
.xContent()
.createParser(parser.getXContentRegistry(),
LoggingDeprecationHandler.INSTANCE,
BytesReference.bytes(xContentBuilder).streamInput())) {
return new Request(DataFrameTransformConfig.fromXContent(newParser, "transform-preview", false));
return new Request(TransformConfig.fromXContent(newParser, "transform-preview", false));
}
}
@ -102,7 +102,7 @@ public class PreviewDataFrameTransformAction extends ActionType<PreviewDataFrame
return this.config.toXContent(builder, params);
}
public DataFrameTransformConfig getConfig() {
public TransformConfig getConfig() {
return config;
}

View File

@ -16,10 +16,10 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.utils.DataFrameStrings;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.utils.TransformStrings;
import java.io.IOException;
import java.util.Locale;
@ -28,31 +28,31 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.cluster.metadata.MetaDataCreateIndexService.validateIndexOrAliasName;
public class PutDataFrameTransformAction extends ActionType<AcknowledgedResponse> {
public class PutTransformAction extends ActionType<AcknowledgedResponse> {
public static final PutDataFrameTransformAction INSTANCE = new PutDataFrameTransformAction();
public static final PutTransformAction INSTANCE = new PutTransformAction();
public static final String NAME = "cluster:admin/data_frame/put";
private static final TimeValue MIN_FREQUENCY = TimeValue.timeValueSeconds(1);
private static final TimeValue MAX_FREQUENCY = TimeValue.timeValueHours(1);
private PutDataFrameTransformAction() {
private PutTransformAction() {
super(NAME, AcknowledgedResponse::new);
}
public static class Request extends AcknowledgedRequest<Request> {
private final DataFrameTransformConfig config;
private final TransformConfig config;
private final boolean deferValidation;
public Request(DataFrameTransformConfig config, boolean deferValidation) {
public Request(TransformConfig config, boolean deferValidation) {
this.config = config;
this.deferValidation = deferValidation;
}
public Request(StreamInput in) throws IOException {
super(in);
this.config = new DataFrameTransformConfig(in);
this.config = new TransformConfig(in);
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
this.deferValidation = in.readBoolean();
} else {
@ -61,12 +61,12 @@ public class PutDataFrameTransformAction extends ActionType<AcknowledgedResponse
}
public static Request fromXContent(final XContentParser parser, final String id, final boolean deferValidation) {
return new Request(DataFrameTransformConfig.fromXContent(parser, id, false), deferValidation);
return new Request(TransformConfig.fromXContent(parser, id, false), deferValidation);
}
/**
* More complex validations with how {@link DataFrameTransformConfig#getDestination()} and
* {@link DataFrameTransformConfig#getSource()} relate are done in the transport handler.
* More complex validations with how {@link TransformConfig#getDestination()} and
* {@link TransformConfig#getSource()} relate are done in the transport handler.
*/
@Override
public ActionRequestValidationException validate() {
@ -91,25 +91,25 @@ public class PutDataFrameTransformAction extends ActionType<AcknowledgedResponse
} catch (InvalidIndexNameException ex) {
validationException = addValidationError(ex.getMessage(), validationException);
}
if (DataFrameStrings.isValidId(config.getId()) == false) {
if (TransformStrings.isValidId(config.getId()) == false) {
validationException = addValidationError(
DataFrameMessages.getMessage(DataFrameMessages.INVALID_ID, DataFrameField.ID.getPreferredName(), config.getId()),
TransformMessages.getMessage(TransformMessages.INVALID_ID, TransformField.ID.getPreferredName(), config.getId()),
validationException);
}
if (DataFrameStrings.hasValidLengthForId(config.getId()) == false) {
if (TransformStrings.hasValidLengthForId(config.getId()) == false) {
validationException = addValidationError(
DataFrameMessages.getMessage(DataFrameMessages.ID_TOO_LONG, DataFrameStrings.ID_LENGTH_LIMIT),
TransformMessages.getMessage(TransformMessages.ID_TOO_LONG, TransformStrings.ID_LENGTH_LIMIT),
validationException);
}
TimeValue frequency = config.getFrequency();
if (frequency != null) {
if (frequency.compareTo(MIN_FREQUENCY) < 0) {
validationException = addValidationError(
"minimum permitted [" + DataFrameField.FREQUENCY + "] is [" + MIN_FREQUENCY.getStringRep() + "]",
"minimum permitted [" + TransformField.FREQUENCY + "] is [" + MIN_FREQUENCY.getStringRep() + "]",
validationException);
} else if (frequency.compareTo(MAX_FREQUENCY) > 0) {
validationException = addValidationError(
"highest permitted [" + DataFrameField.FREQUENCY + "] is [" + MAX_FREQUENCY.getStringRep() + "]",
"highest permitted [" + TransformField.FREQUENCY + "] is [" + MAX_FREQUENCY.getStringRep() + "]",
validationException);
}
}
@ -117,7 +117,7 @@ public class PutDataFrameTransformAction extends ActionType<AcknowledgedResponse
return validationException;
}
public DataFrameTransformConfig getConfig() {
public TransformConfig getConfig() {
return config;
}

View File

@ -14,20 +14,20 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Collections;
import java.util.Objects;
public class StartDataFrameTransformAction extends ActionType<StartDataFrameTransformAction.Response> {
public class StartTransformAction extends ActionType<StartTransformAction.Response> {
public static final StartDataFrameTransformAction INSTANCE = new StartDataFrameTransformAction();
public static final StartTransformAction INSTANCE = new StartTransformAction();
public static final String NAME = "cluster:admin/data_frame/start";
private StartDataFrameTransformAction() {
super(NAME, StartDataFrameTransformAction.Response::new);
private StartTransformAction() {
super(NAME, StartTransformAction.Response::new);
}
public static class Request extends AcknowledgedRequest<Request> {
@ -36,7 +36,7 @@ public class StartDataFrameTransformAction extends ActionType<StartDataFrameTran
private final boolean force;
public Request(String id, boolean force) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName());
this.force = force;
}

View File

@ -16,20 +16,20 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Collections;
import java.util.Objects;
public class StartDataFrameTransformTaskAction extends ActionType<StartDataFrameTransformTaskAction.Response> {
public class StartTransformTaskAction extends ActionType<StartTransformTaskAction.Response> {
public static final StartDataFrameTransformTaskAction INSTANCE = new StartDataFrameTransformTaskAction();
public static final StartTransformTaskAction INSTANCE = new StartTransformTaskAction();
public static final String NAME = "cluster:admin/data_frame/start_task";
private StartDataFrameTransformTaskAction() {
super(NAME, StartDataFrameTransformTaskAction.Response::new);
private StartTransformTaskAction() {
super(NAME, StartTransformTaskAction.Response::new);
}
public static class Request extends BaseTasksRequest<Request> {
@ -38,7 +38,7 @@ public class StartDataFrameTransformTaskAction extends ActionType<StartDataFrame
private final boolean force;
public Request(String id, boolean force) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName());
this.force = force;
}
@ -73,7 +73,7 @@ public class StartDataFrameTransformTaskAction extends ActionType<StartDataFrame
@Override
public boolean match(Task task) {
return task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id);
return task.getDescription().equals(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id);
}
@Override

View File

@ -20,7 +20,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
@ -32,15 +32,15 @@ import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
public class StopDataFrameTransformAction extends ActionType<StopDataFrameTransformAction.Response> {
public class StopTransformAction extends ActionType<StopTransformAction.Response> {
public static final StopDataFrameTransformAction INSTANCE = new StopDataFrameTransformAction();
public static final StopTransformAction INSTANCE = new StopTransformAction();
public static final String NAME = "cluster:admin/data_frame/stop";
public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private StopDataFrameTransformAction() {
super(NAME, StopDataFrameTransformAction.Response::new);
private StopTransformAction() {
super(NAME, StopTransformAction.Response::new);
}
public static class Request extends BaseTasksRequest<Request> {
@ -51,7 +51,7 @@ public class StopDataFrameTransformAction extends ActionType<StopDataFrameTransf
private Set<String> expandedIds;
public Request(String id, boolean waitForCompletion, boolean force, @Nullable TimeValue timeout, boolean allowNoMatch) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName());
this.waitForCompletion = waitForCompletion;
this.force = force;
@ -151,8 +151,8 @@ public class StopDataFrameTransformAction extends ActionType<StopDataFrameTransf
@Override
public boolean match(Task task) {
if (task.getDescription().startsWith(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) {
String id = task.getDescription().substring(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length());
if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) {
String id = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length());
if (expandedIds != null) {
return expandedIds.contains(id);
}

View File

@ -17,9 +17,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigUpdate;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfigUpdate;
import java.io.IOException;
import java.util.Locale;
@ -28,25 +28,25 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.cluster.metadata.MetaDataCreateIndexService.validateIndexOrAliasName;
public class UpdateDataFrameTransformAction extends ActionType<UpdateDataFrameTransformAction.Response> {
public class UpdateTransformAction extends ActionType<UpdateTransformAction.Response> {
public static final UpdateDataFrameTransformAction INSTANCE = new UpdateDataFrameTransformAction();
public static final UpdateTransformAction INSTANCE = new UpdateTransformAction();
public static final String NAME = "cluster:admin/data_frame/update";
private static final TimeValue MIN_FREQUENCY = TimeValue.timeValueSeconds(1);
private static final TimeValue MAX_FREQUENCY = TimeValue.timeValueHours(1);
private UpdateDataFrameTransformAction() {
private UpdateTransformAction() {
super(NAME, Response::new);
}
public static class Request extends AcknowledgedRequest<Request> {
private final DataFrameTransformConfigUpdate update;
private final TransformConfigUpdate update;
private final String id;
private final boolean deferValidation;
public Request(DataFrameTransformConfigUpdate update, String id, boolean deferValidation) {
public Request(TransformConfigUpdate update, String id, boolean deferValidation) {
this.update = update;
this.id = id;
this.deferValidation = deferValidation;
@ -54,18 +54,18 @@ public class UpdateDataFrameTransformAction extends ActionType<UpdateDataFrameTr
public Request(StreamInput in) throws IOException {
super(in);
this.update = new DataFrameTransformConfigUpdate(in);
this.update = new TransformConfigUpdate(in);
this.id = in.readString();
this.deferValidation = in.readBoolean();
}
public static Request fromXContent(final XContentParser parser, final String id, final boolean deferValidation) {
return new Request(DataFrameTransformConfigUpdate.fromXContent(parser), id, deferValidation);
return new Request(TransformConfigUpdate.fromXContent(parser), id, deferValidation);
}
/**
* More complex validations with how {@link DataFrameTransformConfig#getDestination()} and
* {@link DataFrameTransformConfig#getSource()} relate are done in the update transport handler.
* More complex validations with how {@link TransformConfig#getDestination()} and
* {@link TransformConfig#getSource()} relate are done in the update transport handler.
*/
@Override
public ActionRequestValidationException validate() {
@ -85,11 +85,11 @@ public class UpdateDataFrameTransformAction extends ActionType<UpdateDataFrameTr
if (frequency != null) {
if (frequency.compareTo(MIN_FREQUENCY) < 0) {
validationException = addValidationError(
"minimum permitted [" + DataFrameField.FREQUENCY + "] is [" + MIN_FREQUENCY.getStringRep() + "]",
"minimum permitted [" + TransformField.FREQUENCY + "] is [" + MIN_FREQUENCY.getStringRep() + "]",
validationException);
} else if (frequency.compareTo(MAX_FREQUENCY) > 0) {
validationException = addValidationError(
"highest permitted [" + DataFrameField.FREQUENCY + "] is [" + MAX_FREQUENCY.getStringRep() + "]",
"highest permitted [" + TransformField.FREQUENCY + "] is [" + MAX_FREQUENCY.getStringRep() + "]",
validationException);
}
}
@ -105,7 +105,7 @@ public class UpdateDataFrameTransformAction extends ActionType<UpdateDataFrameTr
return deferValidation;
}
public DataFrameTransformConfigUpdate getUpdate() {
public TransformConfigUpdate getUpdate() {
return update;
}
@ -139,14 +139,14 @@ public class UpdateDataFrameTransformAction extends ActionType<UpdateDataFrameTr
public static class Response extends ActionResponse implements ToXContentObject {
private final DataFrameTransformConfig config;
private final TransformConfig config;
public Response(DataFrameTransformConfig config) {
public Response(TransformConfig config) {
this.config = config;
}
public Response(StreamInput in) throws IOException {
this.config = new DataFrameTransformConfig(in);
this.config = new TransformConfig(in);
}
@Override

View File

@ -9,17 +9,17 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage;
import org.elasticsearch.xpack.core.common.notifications.Level;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.util.Date;
public class DataFrameAuditMessage extends AbstractAuditMessage {
public class TransformAuditMessage extends AbstractAuditMessage {
private static final ParseField TRANSFORM_ID = new ParseField(DataFrameField.TRANSFORM_ID);
public static final ConstructingObjectParser<DataFrameAuditMessage, Void> PARSER =
createParser("data_frame_audit_message", DataFrameAuditMessage::new, TRANSFORM_ID);
private static final ParseField TRANSFORM_ID = new ParseField(TransformField.TRANSFORM_ID);
public static final ConstructingObjectParser<TransformAuditMessage, Void> PARSER =
createParser("data_frame_audit_message", TransformAuditMessage::new, TRANSFORM_ID);
public DataFrameAuditMessage(String resourceId, String message, Level level, Date timestamp, String nodeName) {
public TransformAuditMessage(String resourceId, String message, Level level, Date timestamp, String nodeName) {
super(resourceId, message, level, timestamp, nodeName);
}

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import java.io.IOException;
import java.util.Collections;
@ -85,7 +85,7 @@ public class QueryConfig extends AbstractDiffable<QueryConfig> implements Writea
query = AbstractQueryBuilder.parseInnerQueryBuilder(sourceParser);
} catch (Exception e) {
if (lenient) {
logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY, e);
logger.warn(TransformMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY, e);
} else {
throw e;
}

View File

@ -19,7 +19,7 @@ public interface SyncConfig extends ToXContentObject, NamedWriteable {
*/
boolean isValid();
QueryBuilder getRangeQuery(DataFrameTransformCheckpoint newCheckpoint);
QueryBuilder getRangeQuery(TransformCheckpoint newCheckpoint);
QueryBuilder getRangeQuery(DataFrameTransformCheckpoint oldCheckpoint, DataFrameTransformCheckpoint newCheckpoint);
QueryBuilder getRangeQuery(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint);
}

View File

@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Objects;
@ -43,10 +43,10 @@ public class TimeSyncConfig implements SyncConfig {
TimeValue delay = (TimeValue) args[1];
return new TimeSyncConfig(field, delay);
});
parser.declareString(constructorArg(), DataFrameField.FIELD);
parser.declareString(constructorArg(), TransformField.FIELD);
parser.declareField(optionalConstructorArg(),
(p, c) -> TimeValue.parseTimeValue(p.text(), DEFAULT_DELAY, DataFrameField.DELAY.getPreferredName()),
DataFrameField.DELAY,
(p, c) -> TimeValue.parseTimeValue(p.text(), DEFAULT_DELAY, TransformField.DELAY.getPreferredName()),
TransformField.DELAY,
ObjectParser.ValueType.STRING);
return parser;
}
@ -56,7 +56,7 @@ public class TimeSyncConfig implements SyncConfig {
}
public TimeSyncConfig(final String field, final TimeValue delay) {
this.field = ExceptionsHelper.requireNonNull(field, DataFrameField.FIELD.getPreferredName());
this.field = ExceptionsHelper.requireNonNull(field, TransformField.FIELD.getPreferredName());
this.delay = delay == null ? DEFAULT_DELAY : delay;
}
@ -87,8 +87,8 @@ public class TimeSyncConfig implements SyncConfig {
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.FIELD.getPreferredName(), field);
builder.field(DataFrameField.DELAY.getPreferredName(), delay.getStringRep());
builder.field(TransformField.FIELD.getPreferredName(), field);
builder.field(TransformField.DELAY.getPreferredName(), delay.getStringRep());
builder.endObject();
return builder;
}
@ -129,16 +129,16 @@ public class TimeSyncConfig implements SyncConfig {
@Override
public String getWriteableName() {
return DataFrameField.TIME_BASED_SYNC.getPreferredName();
return TransformField.TIME_BASED_SYNC.getPreferredName();
}
@Override
public QueryBuilder getRangeQuery(DataFrameTransformCheckpoint newCheckpoint) {
public QueryBuilder getRangeQuery(TransformCheckpoint newCheckpoint) {
return new RangeQueryBuilder(field).lt(newCheckpoint.getTimeUpperBound()).format("epoch_millis");
}
@Override
public QueryBuilder getRangeQuery(DataFrameTransformCheckpoint oldCheckpoint, DataFrameTransformCheckpoint newCheckpoint) {
public QueryBuilder getRangeQuery(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint) {
return new RangeQueryBuilder(field).gte(oldCheckpoint.getTimeUpperBound()).lt(newCheckpoint.getTimeUpperBound())
.format("epoch_millis");
}

View File

@ -16,41 +16,41 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Objects;
public class DataFrameTransform extends AbstractDiffable<DataFrameTransform> implements XPackPlugin.XPackPersistentTaskParams {
public class Transform extends AbstractDiffable<Transform> implements XPackPlugin.XPackPersistentTaskParams {
public static final String NAME = DataFrameField.TASK_NAME;
public static final ParseField FREQUENCY = DataFrameField.FREQUENCY;
public static final String NAME = TransformField.TASK_NAME;
public static final ParseField FREQUENCY = TransformField.FREQUENCY;
private final String transformId;
private final Version version;
private final TimeValue frequency;
public static final ConstructingObjectParser<DataFrameTransform, Void> PARSER = new ConstructingObjectParser<>(NAME, true,
a -> new DataFrameTransform((String) a[0], (String) a[1], (String) a[2]));
public static final ConstructingObjectParser<Transform, Void> PARSER = new ConstructingObjectParser<>(NAME, true,
a -> new Transform((String) a[0], (String) a[1], (String) a[2]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DataFrameField.VERSION);
PARSER.declareString(ConstructingObjectParser.constructorArg(), TransformField.ID);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TransformField.VERSION);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FREQUENCY);
}
private DataFrameTransform(String transformId, String version, String frequency) {
private Transform(String transformId, String version, String frequency) {
this(transformId, version == null ? null : Version.fromString(version),
frequency == null ? null : TimeValue.parseTimeValue(frequency, FREQUENCY.getPreferredName()));
}
public DataFrameTransform(String transformId, Version version, TimeValue frequency) {
public Transform(String transformId, Version version, TimeValue frequency) {
this.transformId = transformId;
this.version = version == null ? Version.V_7_2_0 : version;
this.frequency = frequency;
}
public DataFrameTransform(StreamInput in) throws IOException {
public Transform(StreamInput in) throws IOException {
this.transformId = in.readString();
if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
this.version = Version.readVersion(in);
@ -88,8 +88,8 @@ public class DataFrameTransform extends AbstractDiffable<DataFrameTransform> imp
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.ID.getPreferredName(), transformId);
builder.field(DataFrameField.VERSION.getPreferredName(), version);
builder.field(TransformField.ID.getPreferredName(), transformId);
builder.field(TransformField.VERSION.getPreferredName(), version);
if (frequency != null) {
builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
}
@ -109,7 +109,7 @@ public class DataFrameTransform extends AbstractDiffable<DataFrameTransform> imp
return frequency;
}
public static DataFrameTransform fromXContent(XContentParser parser) throws IOException {
public static Transform fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@ -123,7 +123,7 @@ public class DataFrameTransform extends AbstractDiffable<DataFrameTransform> imp
return false;
}
DataFrameTransform that = (DataFrameTransform) other;
Transform that = (Transform) other;
return Objects.equals(this.transformId, that.transformId)
&& Objects.equals(this.version, that.version)

View File

@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Arrays;
@ -30,7 +30,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Checkpoint document to store the checkpoint of a data frame transform
* Checkpoint document to store the checkpoint of a transform
*
* The fields:
*
@ -40,9 +40,9 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
* time_upper_bound for time-based indices this holds the upper time boundary of this checkpoint
*
*/
public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject {
public class TransformCheckpoint implements Writeable, ToXContentObject {
public static DataFrameTransformCheckpoint EMPTY = new DataFrameTransformCheckpoint("empty", 0L, -1L, Collections.emptyMap(), 0L);
public static TransformCheckpoint EMPTY = new TransformCheckpoint("empty", 0L, -1L, Collections.emptyMap(), 0L);
// the own checkpoint
public static final ParseField CHECKPOINT = new ParseField("checkpoint");
@ -52,8 +52,8 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
private static final String NAME = "data_frame_transform_checkpoint";
private static final ConstructingObjectParser<DataFrameTransformCheckpoint, Void> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<DataFrameTransformCheckpoint, Void> LENIENT_PARSER = createParser(true);
private static final ConstructingObjectParser<TransformCheckpoint, Void> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<TransformCheckpoint, Void> LENIENT_PARSER = createParser(true);
private final String transformId;
private final long timestampMillis;
@ -61,8 +61,8 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
private final Map<String, long[]> indicesCheckpoints;
private final long timeUpperBoundMillis;
private static ConstructingObjectParser<DataFrameTransformCheckpoint, Void> createParser(boolean lenient) {
ConstructingObjectParser<DataFrameTransformCheckpoint, Void> parser = new ConstructingObjectParser<>(NAME,
private static ConstructingObjectParser<TransformCheckpoint, Void> createParser(boolean lenient) {
ConstructingObjectParser<TransformCheckpoint, Void> parser = new ConstructingObjectParser<>(NAME,
lenient, args -> {
String id = (String) args[0];
long timestamp = (Long) args[1];
@ -74,13 +74,13 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
Long timeUpperBound = (Long) args[4];
// ignored, only for internal storage: String docType = (String) args[5];
return new DataFrameTransformCheckpoint(id, timestamp, checkpoint, checkpoints, timeUpperBound);
return new TransformCheckpoint(id, timestamp, checkpoint, checkpoints, timeUpperBound);
});
parser.declareString(constructorArg(), DataFrameField.ID);
parser.declareString(constructorArg(), TransformField.ID);
// note: this is never parsed from the outside where timestamp can be formatted as date time
parser.declareLong(constructorArg(), DataFrameField.TIMESTAMP_MILLIS);
parser.declareLong(constructorArg(), TransformField.TIMESTAMP_MILLIS);
parser.declareLong(constructorArg(), CHECKPOINT);
parser.declareObject(constructorArg(), (p,c) -> {
@ -102,13 +102,13 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
}
return checkPointsByIndexName;
}, INDICES);
parser.declareLong(optionalConstructorArg(), DataFrameField.TIME_UPPER_BOUND_MILLIS);
parser.declareString(optionalConstructorArg(), DataFrameField.INDEX_DOC_TYPE);
parser.declareLong(optionalConstructorArg(), TransformField.TIME_UPPER_BOUND_MILLIS);
parser.declareString(optionalConstructorArg(), TransformField.INDEX_DOC_TYPE);
return parser;
}
public DataFrameTransformCheckpoint(String transformId, long timestamp, long checkpoint, Map<String, long[]> checkpoints,
public TransformCheckpoint(String transformId, long timestamp, long checkpoint, Map<String, long[]> checkpoints,
Long timeUpperBound) {
this.transformId = Objects.requireNonNull(transformId);
this.timestampMillis = timestamp;
@ -117,7 +117,7 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
this.timeUpperBoundMillis = timeUpperBound == null ? 0 : timeUpperBound;
}
public DataFrameTransformCheckpoint(StreamInput in) throws IOException {
public TransformCheckpoint(StreamInput in) throws IOException {
this.transformId = in.readString();
this.timestampMillis = in.readLong();
this.checkpoint = in.readLong();
@ -151,19 +151,19 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.ID.getPreferredName(), transformId);
builder.field(TransformField.ID.getPreferredName(), transformId);
builder.field(CHECKPOINT.getPreferredName(), checkpoint);
builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME);
builder.field(TransformField.INDEX_DOC_TYPE.getPreferredName(), NAME);
builder.startObject(INDICES.getPreferredName());
for (Entry<String, long[]> entry : indicesCheckpoints.entrySet()) {
builder.array(entry.getKey(), entry.getValue());
}
builder.endObject();
builder.field(DataFrameField.TIMESTAMP_MILLIS.getPreferredName(), timestampMillis);
builder.field(TransformField.TIMESTAMP_MILLIS.getPreferredName(), timestampMillis);
if (timeUpperBoundMillis > 0) {
builder.field(DataFrameField.TIME_UPPER_BOUND_MILLIS.getPreferredName(), timeUpperBoundMillis);
builder.field(TransformField.TIME_UPPER_BOUND_MILLIS.getPreferredName(), timeUpperBoundMillis);
}
builder.endObject();
@ -209,7 +209,7 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
return false;
}
final DataFrameTransformCheckpoint that = (DataFrameTransformCheckpoint) other;
final TransformCheckpoint that = (TransformCheckpoint) other;
// compare the timestamp, id, checkpoint and than call matches for the rest
return this.timestampMillis == that.timestampMillis && this.checkpoint == that.checkpoint
@ -219,12 +219,12 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
/**
* Compares 2 checkpoints ignoring some inner fields.
*
* This is for comparing 2 checkpoints to check whether the data frame transform requires an update
* This is for comparing 2 checkpoints to check whether the transform requires an update
*
* @param that other checkpoint
* @return true if checkpoints match
*/
public boolean matches (DataFrameTransformCheckpoint that) {
public boolean matches (TransformCheckpoint that) {
if (this == that) {
return true;
}
@ -246,7 +246,7 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
return hash;
}
public static DataFrameTransformCheckpoint fromXContent(final XContentParser parser, boolean lenient) throws IOException {
public static TransformCheckpoint fromXContent(final XContentParser parser, boolean lenient) throws IOException {
return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null);
}
@ -270,7 +270,7 @@ public class DataFrameTransformCheckpoint implements Writeable, ToXContentObject
*
* @return count number of operations the checkpoint is behind or -1L if it could not calculate the difference
*/
public static long getBehind(DataFrameTransformCheckpoint oldCheckpoint, DataFrameTransformCheckpoint newCheckpoint) {
public static long getBehind(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint) {
if (oldCheckpoint.isTransient()) {
if (newCheckpoint.isTransient() == false) {
throw new IllegalArgumentException("can not compare transient against a non transient checkpoint");

View File

@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Objects;
@ -26,37 +26,37 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
*
* This is the user-facing side of DataFrameTransformCheckpoint, containing only the stats to be exposed.
*/
public class DataFrameTransformCheckpointStats implements Writeable, ToXContentObject {
public class TransformCheckpointStats implements Writeable, ToXContentObject {
public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L);
public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
private final long checkpoint;
private final DataFrameIndexerPosition position;
private final DataFrameTransformProgress checkpointProgress;
private final TransformIndexerPosition position;
private final TransformProgress checkpointProgress;
private final long timestampMillis;
private final long timeUpperBoundMillis;
static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
"data_frame_transform_checkpoint_stats", true, args -> {
long checkpoint = args[0] == null ? 0L : (Long) args[0];
DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1];
DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2];
TransformIndexerPosition position = (TransformIndexerPosition) args[1];
TransformProgress checkpointProgress = (TransformProgress) args[2];
long timestamp = args[3] == null ? 0L : (Long) args[3];
long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
});
static {
LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.CHECKPOINT);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, DataFrameField.POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, DataFrameField.CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.TIME_UPPER_BOUND_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TransformField.CHECKPOINT);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, TransformField.POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, TransformField.CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TransformField.TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TransformField.TIME_UPPER_BOUND_MILLIS);
}
public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position,
final DataFrameTransformProgress checkpointProgress, final long timestampMillis,
public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
final TransformProgress checkpointProgress, final long timestampMillis,
final long timeUpperBoundMillis) {
this.checkpoint = checkpoint;
this.position = position;
@ -65,16 +65,16 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO
this.timeUpperBoundMillis = timeUpperBoundMillis;
}
public DataFrameTransformCheckpointStats(StreamInput in) throws IOException {
public TransformCheckpointStats(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
this.checkpoint = in.readVLong();
if (in.readBoolean()) {
this.position = new DataFrameIndexerPosition(in);
this.position = new TransformIndexerPosition(in);
} else {
this.position = null;
}
if (in.readBoolean()) {
this.checkpointProgress = new DataFrameTransformProgress(in);
this.checkpointProgress = new TransformProgress(in);
} else {
this.checkpointProgress = null;
}
@ -91,11 +91,11 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO
return checkpoint;
}
public DataFrameIndexerPosition getPosition() {
public TransformIndexerPosition getPosition() {
return position;
}
public DataFrameTransformProgress getCheckpointProgress() {
public TransformProgress getCheckpointProgress() {
return checkpointProgress;
}
@ -110,19 +110,19 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.CHECKPOINT.getPreferredName(), checkpoint);
builder.field(TransformField.CHECKPOINT.getPreferredName(), checkpoint);
if (position != null) {
builder.field(DataFrameField.POSITION.getPreferredName(), position);
builder.field(TransformField.POSITION.getPreferredName(), position);
}
if (checkpointProgress != null) {
builder.field(DataFrameField.CHECKPOINT_PROGRESS.getPreferredName(), checkpointProgress);
builder.field(TransformField.CHECKPOINT_PROGRESS.getPreferredName(), checkpointProgress);
}
if (timestampMillis > 0) {
builder.timeField(DataFrameField.TIMESTAMP_MILLIS.getPreferredName(), DataFrameField.TIMESTAMP.getPreferredName(),
builder.timeField(TransformField.TIMESTAMP_MILLIS.getPreferredName(), TransformField.TIMESTAMP.getPreferredName(),
timestampMillis);
}
if (timeUpperBoundMillis > 0) {
builder.timeField(DataFrameField.TIME_UPPER_BOUND_MILLIS.getPreferredName(), DataFrameField.TIME_UPPER_BOUND.getPreferredName(),
builder.timeField(TransformField.TIME_UPPER_BOUND_MILLIS.getPreferredName(), TransformField.TIME_UPPER_BOUND.getPreferredName(),
timeUpperBoundMillis);
}
builder.endObject();
@ -165,7 +165,7 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO
return false;
}
DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
TransformCheckpointStats that = (TransformCheckpointStats) other;
return this.checkpoint == that.checkpoint
&& Objects.equals(this.position, that.position)
@ -174,7 +174,7 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO
&& this.timeUpperBoundMillis == that.timeUpperBoundMillis;
}
public static DataFrameTransformCheckpointStats fromXContent(XContentParser p) {
public static TransformCheckpointStats fromXContent(XContentParser p) {
return LENIENT_PARSER.apply(p, null);
}
}

View File

@ -29,11 +29,11 @@ import java.util.Objects;
* - the in progress checkpoint
* - the current state of the source
*/
public class DataFrameTransformCheckpointingInfo implements Writeable, ToXContentObject {
public class TransformCheckpointingInfo implements Writeable, ToXContentObject {
public static final DataFrameTransformCheckpointingInfo EMPTY = new DataFrameTransformCheckpointingInfo(
DataFrameTransformCheckpointStats.EMPTY,
DataFrameTransformCheckpointStats.EMPTY,
public static final TransformCheckpointingInfo EMPTY = new TransformCheckpointingInfo(
TransformCheckpointStats.EMPTY,
TransformCheckpointStats.EMPTY,
0L,
null);
@ -41,30 +41,30 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
public static final ParseField NEXT_CHECKPOINT = new ParseField("next");
public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
private final DataFrameTransformCheckpointStats last;
private final DataFrameTransformCheckpointStats next;
private final TransformCheckpointStats last;
private final TransformCheckpointStats next;
private final long operationsBehind;
private Instant changesLastDetectedAt;
private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
new ConstructingObjectParser<>(
"data_frame_transform_checkpointing_info",
true,
a -> {
long behind = a[2] == null ? 0L : (Long) a[2];
Instant changesLastDetectedAt = (Instant)a[3];
return new DataFrameTransformCheckpointingInfo(
a[0] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[0],
a[1] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[1],
return new TransformCheckpointingInfo(
a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
behind,
changesLastDetectedAt);
});
static {
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
DataFrameTransformCheckpointStats.LENIENT_PARSER::apply, LAST_CHECKPOINT);
TransformCheckpointStats.LENIENT_PARSER::apply, LAST_CHECKPOINT);
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
DataFrameTransformCheckpointStats.LENIENT_PARSER::apply, NEXT_CHECKPOINT);
TransformCheckpointStats.LENIENT_PARSER::apply, NEXT_CHECKPOINT);
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
p -> TimeUtils.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
@ -81,8 +81,8 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
* @param operationsBehind counter of operations the current checkpoint is behind source
* @param changesLastDetectedAt the last time the source indices were checked for changes
*/
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
DataFrameTransformCheckpointStats next,
public TransformCheckpointingInfo(TransformCheckpointStats last,
TransformCheckpointStats next,
long operationsBehind,
Instant changesLastDetectedAt) {
this.last = Objects.requireNonNull(last);
@ -91,26 +91,26 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
this.changesLastDetectedAt = changesLastDetectedAt == null ? null : Instant.ofEpochMilli(changesLastDetectedAt.toEpochMilli());
}
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
DataFrameTransformCheckpointStats next,
public TransformCheckpointingInfo(TransformCheckpointStats last,
TransformCheckpointStats next,
long operationsBehind) {
this(last, next, operationsBehind, null);
}
public DataFrameTransformCheckpointingInfo(StreamInput in) throws IOException {
last = new DataFrameTransformCheckpointStats(in);
next = new DataFrameTransformCheckpointStats(in);
public TransformCheckpointingInfo(StreamInput in) throws IOException {
last = new TransformCheckpointStats(in);
next = new TransformCheckpointStats(in);
operationsBehind = in.readLong();
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
changesLastDetectedAt = in.readOptionalInstant();
}
}
public DataFrameTransformCheckpointStats getLast() {
public TransformCheckpointStats getLast() {
return last;
}
public DataFrameTransformCheckpointStats getNext() {
public TransformCheckpointStats getNext() {
return next;
}
@ -122,7 +122,7 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
return changesLastDetectedAt;
}
public DataFrameTransformCheckpointingInfo setChangesLastDetectedAt(Instant changesLastDetectedAt) {
public TransformCheckpointingInfo setChangesLastDetectedAt(Instant changesLastDetectedAt) {
this.changesLastDetectedAt = Instant.ofEpochMilli(Objects.requireNonNull(changesLastDetectedAt).toEpochMilli());
return this;
}
@ -154,7 +154,7 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
}
}
public static DataFrameTransformCheckpointingInfo fromXContent(XContentParser p) {
public static TransformCheckpointingInfo fromXContent(XContentParser p) {
return LENIENT_PARSER.apply(p, null);
}
@ -173,7 +173,7 @@ public class DataFrameTransformCheckpointingInfo implements Writeable, ToXConten
return false;
}
DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
return Objects.equals(this.last, that.last) &&
Objects.equals(this.next, that.next) &&

View File

@ -22,8 +22,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.xpack.core.common.time.TimeUtils;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
@ -39,7 +39,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
/**
* This class holds the configuration details of a data frame transform
*/
public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransformConfig> implements Writeable, ToXContentObject {
public class TransformConfig extends AbstractDiffable<TransformConfig> implements Writeable, ToXContentObject {
public static final String NAME = "data_frame_transform_config";
public static final ParseField HEADERS = new ParseField("headers");
@ -47,8 +47,8 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
// types of transforms
public static final ParseField PIVOT_TRANSFORM = new ParseField("pivot");
private static final ConstructingObjectParser<DataFrameTransformConfig, String> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<DataFrameTransformConfig, String> LENIENT_PARSER = createParser(true);
private static final ConstructingObjectParser<TransformConfig, String> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<TransformConfig, String> LENIENT_PARSER = createParser(true);
static final int MAX_DESCRIPTION_LENGTH = 1_000;
private final String id;
@ -71,8 +71,8 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
}
}
private static ConstructingObjectParser<DataFrameTransformConfig, String> createParser(boolean lenient) {
ConstructingObjectParser<DataFrameTransformConfig, String> parser = new ConstructingObjectParser<>(NAME, lenient,
private static ConstructingObjectParser<TransformConfig, String> createParser(boolean lenient) {
ConstructingObjectParser<TransformConfig, String> parser = new ConstructingObjectParser<>(NAME, lenient,
(args, optionalId) -> {
String id = (String) args[0];
@ -81,14 +81,14 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
id = optionalId;
} else if (optionalId != null && id.equals(optionalId) == false) {
throw new IllegalArgumentException(
DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_INCONSISTENT_ID, id, optionalId));
TransformMessages.getMessage(TransformMessages.REST_PUT_DATA_FRAME_INCONSISTENT_ID, id, optionalId));
}
SourceConfig source = (SourceConfig) args[1];
DestConfig dest = (DestConfig) args[2];
TimeValue frequency =
args[3] == null ? null : TimeValue.parseTimeValue((String) args[3], DataFrameField.FREQUENCY.getPreferredName());
args[3] == null ? null : TimeValue.parseTimeValue((String) args[3], TransformField.FREQUENCY.getPreferredName());
SyncConfig syncConfig = (SyncConfig) args[4];
// ignored, only for internal storage: String docType = (String) args[5];
@ -96,8 +96,8 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
// on strict parsing do not allow injection of headers, transform version, or create time
if (lenient == false) {
validateStrictParsingParams(args[6], HEADERS.getPreferredName());
validateStrictParsingParams(args[9], DataFrameField.CREATE_TIME.getPreferredName());
validateStrictParsingParams(args[10], DataFrameField.VERSION.getPreferredName());
validateStrictParsingParams(args[9], TransformField.CREATE_TIME.getPreferredName());
validateStrictParsingParams(args[10], TransformField.VERSION.getPreferredName());
}
@SuppressWarnings("unchecked")
@ -105,7 +105,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
PivotConfig pivotConfig = (PivotConfig) args[7];
String description = (String)args[8];
return new DataFrameTransformConfig(id,
return new TransformConfig(id,
source,
dest,
frequency,
@ -117,22 +117,22 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
(String)args[10]);
});
parser.declareString(optionalConstructorArg(), DataFrameField.ID);
parser.declareObject(constructorArg(), (p, c) -> SourceConfig.fromXContent(p, lenient), DataFrameField.SOURCE);
parser.declareObject(constructorArg(), (p, c) -> DestConfig.fromXContent(p, lenient), DataFrameField.DESTINATION);
parser.declareString(optionalConstructorArg(), DataFrameField.FREQUENCY);
parser.declareString(optionalConstructorArg(), TransformField.ID);
parser.declareObject(constructorArg(), (p, c) -> SourceConfig.fromXContent(p, lenient), TransformField.SOURCE);
parser.declareObject(constructorArg(), (p, c) -> DestConfig.fromXContent(p, lenient), TransformField.DESTINATION);
parser.declareString(optionalConstructorArg(), TransformField.FREQUENCY);
parser.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p, lenient), DataFrameField.SYNC);
parser.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p, lenient), TransformField.SYNC);
parser.declareString(optionalConstructorArg(), DataFrameField.INDEX_DOC_TYPE);
parser.declareString(optionalConstructorArg(), TransformField.INDEX_DOC_TYPE);
parser.declareObject(optionalConstructorArg(), (p, c) -> p.mapStrings(), HEADERS);
parser.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p, lenient), PIVOT_TRANSFORM);
parser.declareString(optionalConstructorArg(), DataFrameField.DESCRIPTION);
parser.declareString(optionalConstructorArg(), TransformField.DESCRIPTION);
parser.declareField(optionalConstructorArg(),
p -> TimeUtils.parseTimeFieldToInstant(p, DataFrameField.CREATE_TIME.getPreferredName()), DataFrameField.CREATE_TIME,
p -> TimeUtils.parseTimeFieldToInstant(p, TransformField.CREATE_TIME.getPreferredName()), TransformField.CREATE_TIME,
ObjectParser.ValueType.VALUE);
parser.declareString(optionalConstructorArg(), DataFrameField.VERSION);
parser.declareString(optionalConstructorArg(), TransformField.VERSION);
return parser;
}
@ -148,7 +148,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return NAME + "-" + transformId;
}
DataFrameTransformConfig(final String id,
TransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
@ -158,9 +158,9 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
final String description,
final Instant createTime,
final String version){
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
this.source = ExceptionsHelper.requireNonNull(source, DataFrameField.SOURCE.getPreferredName());
this.dest = ExceptionsHelper.requireNonNull(dest, DataFrameField.DESTINATION.getPreferredName());
this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName());
this.source = ExceptionsHelper.requireNonNull(source, TransformField.SOURCE.getPreferredName());
this.dest = ExceptionsHelper.requireNonNull(dest, TransformField.DESTINATION.getPreferredName());
this.frequency = frequency;
this.syncConfig = syncConfig;
this.setHeaders(headers == null ? Collections.emptyMap() : headers);
@ -169,7 +169,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
// at least one function must be defined
if (this.pivotConfig == null) {
throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM);
throw new IllegalArgumentException(TransformMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM);
}
if (this.description != null && this.description.length() > MAX_DESCRIPTION_LENGTH) {
throw new IllegalArgumentException("[description] must be less than 1000 characters in length.");
@ -178,7 +178,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
this.transformVersion = version == null ? null : Version.fromString(version);
}
public DataFrameTransformConfig(final String id,
public TransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
@ -189,7 +189,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
this(id, source, dest, frequency, syncConfig, headers, pivotConfig, description, null, null);
}
public DataFrameTransformConfig(final StreamInput in) throws IOException {
public TransformConfig(final StreamInput in) throws IOException {
id = in.readString();
source = new SourceConfig(in);
dest = new DestConfig(in);
@ -236,7 +236,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return headers;
}
public DataFrameTransformConfig setHeaders(Map<String, String> headers) {
public TransformConfig setHeaders(Map<String, String> headers) {
this.headers = headers;
return this;
}
@ -245,7 +245,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return transformVersion;
}
public DataFrameTransformConfig setVersion(Version transformVersion) {
public TransformConfig setVersion(Version transformVersion) {
this.transformVersion = transformVersion;
return this;
}
@ -254,8 +254,8 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return createTime;
}
public DataFrameTransformConfig setCreateTime(Instant createTime) {
ExceptionsHelper.requireNonNull(createTime, DataFrameField.CREATE_TIME.getPreferredName());
public TransformConfig setCreateTime(Instant createTime) {
ExceptionsHelper.requireNonNull(createTime, TransformField.CREATE_TIME.getPreferredName());
this.createTime = Instant.ofEpochMilli(createTime.toEpochMilli());
return this;
}
@ -307,34 +307,34 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.ID.getPreferredName(), id);
builder.field(DataFrameField.SOURCE.getPreferredName(), source);
builder.field(DataFrameField.DESTINATION.getPreferredName(), dest);
builder.field(TransformField.ID.getPreferredName(), id);
builder.field(TransformField.SOURCE.getPreferredName(), source);
builder.field(TransformField.DESTINATION.getPreferredName(), dest);
if (frequency != null) {
builder.field(DataFrameField.FREQUENCY.getPreferredName(), frequency.getStringRep());
builder.field(TransformField.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(DataFrameField.SYNC.getPreferredName());
builder.startObject(TransformField.SYNC.getPreferredName());
builder.field(syncConfig.getWriteableName(), syncConfig);
builder.endObject();
}
if (pivotConfig != null) {
builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
}
if (params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false)) {
builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME);
if (params.paramAsBoolean(TransformField.FOR_INTERNAL_STORAGE, false)) {
builder.field(TransformField.INDEX_DOC_TYPE.getPreferredName(), NAME);
}
if (headers.isEmpty() == false && params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false) == true) {
if (headers.isEmpty() == false && params.paramAsBoolean(TransformField.FOR_INTERNAL_STORAGE, false) == true) {
builder.field(HEADERS.getPreferredName(), headers);
}
if (description != null) {
builder.field(DataFrameField.DESCRIPTION.getPreferredName(), description);
builder.field(TransformField.DESCRIPTION.getPreferredName(), description);
}
if (transformVersion != null) {
builder.field(DataFrameField.VERSION.getPreferredName(), transformVersion);
builder.field(TransformField.VERSION.getPreferredName(), transformVersion);
}
if (createTime != null) {
builder.timeField(DataFrameField.CREATE_TIME.getPreferredName(), DataFrameField.CREATE_TIME.getPreferredName() + "_string",
builder.timeField(TransformField.CREATE_TIME.getPreferredName(), TransformField.CREATE_TIME.getPreferredName() + "_string",
createTime.toEpochMilli());
}
builder.endObject();
@ -351,7 +351,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return false;
}
final DataFrameTransformConfig that = (DataFrameTransformConfig) other;
final TransformConfig that = (TransformConfig) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)
@ -375,7 +375,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return Strings.toString(this, true, true);
}
public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId,
public static TransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId,
boolean lenient) {
return lenient ? LENIENT_PARSER.apply(parser, optionalTransformId) : STRICT_PARSER.apply(parser, optionalTransformId);
@ -395,7 +395,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
public Builder() { }
public Builder(DataFrameTransformConfig config) {
public Builder(TransformConfig config) {
this.id = config.id;
this.source = config.source;
this.dest = config.dest;
@ -452,8 +452,8 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return this;
}
public DataFrameTransformConfig build() {
return new DataFrameTransformConfig(id,
public TransformConfig build() {
return new TransformConfig(id,
source,
dest,
frequency,
@ -475,7 +475,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
return false;
}
final DataFrameTransformConfig.Builder that = (DataFrameTransformConfig.Builder) other;
final TransformConfig.Builder that = (TransformConfig.Builder) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)

View File

@ -20,42 +20,42 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig.MAX_DESCRIPTION_LENGTH;
import static org.elasticsearch.xpack.core.transform.transforms.TransformConfig.MAX_DESCRIPTION_LENGTH;
/**
* This class holds the mutable configuration items for a data frame transform
*/
public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObject {
public class TransformConfigUpdate implements Writeable, ToXContentObject {
public static final String NAME = "data_frame_transform_config_update";
private static final ConstructingObjectParser<DataFrameTransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
private static final ConstructingObjectParser<TransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
false,
(args) -> {
SourceConfig source = (SourceConfig) args[0];
DestConfig dest = (DestConfig) args[1];
TimeValue frequency = args[2] == null ?
null :
TimeValue.parseTimeValue((String) args[2], DataFrameField.FREQUENCY.getPreferredName());
TimeValue.parseTimeValue((String) args[2], TransformField.FREQUENCY.getPreferredName());
SyncConfig syncConfig = (SyncConfig) args[3];
String description = (String) args[4];
return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
});
static {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p, false), DataFrameField.SOURCE);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.fromXContent(p, false), DataFrameField.DESTINATION);
PARSER.declareString(optionalConstructorArg(), DataFrameField.FREQUENCY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), DataFrameField.SYNC);
PARSER.declareString(optionalConstructorArg(), DataFrameField.DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p, false), TransformField.SOURCE);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.fromXContent(p, false), TransformField.DESTINATION);
PARSER.declareString(optionalConstructorArg(), TransformField.FREQUENCY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformField.SYNC);
PARSER.declareString(optionalConstructorArg(), TransformField.DESCRIPTION);
}
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
@ -73,7 +73,7 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
private final String description;
private Map<String, String> headers;
public DataFrameTransformConfigUpdate(final SourceConfig source,
public TransformConfigUpdate(final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
@ -88,7 +88,7 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
}
}
public DataFrameTransformConfigUpdate(final StreamInput in) throws IOException {
public TransformConfigUpdate(final StreamInput in) throws IOException {
source = in.readOptionalWriteable(SourceConfig::new);
dest = in.readOptionalWriteable(DestConfig::new);
frequency = in.readOptionalTimeValue();
@ -147,24 +147,24 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
if (source != null) {
builder.field(DataFrameField.SOURCE.getPreferredName(), source);
builder.field(TransformField.SOURCE.getPreferredName(), source);
}
if (dest != null) {
builder.field(DataFrameField.DESTINATION.getPreferredName(), dest);
builder.field(TransformField.DESTINATION.getPreferredName(), dest);
}
if (frequency != null) {
builder.field(DataFrameField.FREQUENCY.getPreferredName(), frequency.getStringRep());
builder.field(TransformField.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(DataFrameField.SYNC.getPreferredName());
builder.startObject(TransformField.SYNC.getPreferredName());
builder.field(syncConfig.getWriteableName(), syncConfig);
builder.endObject();
}
if (description != null) {
builder.field(DataFrameField.DESCRIPTION.getPreferredName(), description);
builder.field(TransformField.DESCRIPTION.getPreferredName(), description);
}
if (headers != null) {
builder.field(DataFrameTransformConfig.HEADERS.getPreferredName(), headers);
builder.field(TransformConfig.HEADERS.getPreferredName(), headers);
}
builder.endObject();
return builder;
@ -180,7 +180,7 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
return false;
}
final DataFrameTransformConfigUpdate that = (DataFrameTransformConfigUpdate) other;
final TransformConfigUpdate that = (TransformConfigUpdate) other;
return Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
@ -200,11 +200,11 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
return Strings.toString(this, true, true);
}
public static DataFrameTransformConfigUpdate fromXContent(final XContentParser parser) {
public static TransformConfigUpdate fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
public boolean isNoop(DataFrameTransformConfig config) {
public boolean isNoop(TransformConfig config) {
return isNullOrEqual(source, config.getSource())
&& isNullOrEqual(dest, config.getDestination())
&& isNullOrEqual(frequency, config.getFrequency())
@ -217,11 +217,11 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
return lft == null || lft.equals(rgt);
}
public DataFrameTransformConfig apply(DataFrameTransformConfig config) {
public TransformConfig apply(TransformConfig config) {
if (isNoop(config)) {
return config;
}
DataFrameTransformConfig.Builder builder = new DataFrameTransformConfig.Builder(config);
TransformConfig.Builder builder = new TransformConfig.Builder(config);
if (source != null) {
builder.setSource(source);
}
@ -235,7 +235,7 @@ public class DataFrameTransformConfigUpdate implements Writeable, ToXContentObje
String currentConfigName = config.getSyncConfig() == null ? "null" : config.getSyncConfig().getWriteableName();
if (syncConfig.getWriteableName().equals(currentConfigName) == false) {
throw new ElasticsearchStatusException(
DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_UPDATE_CANNOT_CHANGE_SYNC_METHOD,
TransformMessages.getMessage(TransformMessages.DATA_FRAME_UPDATE_CANNOT_CHANGE_SYNC_METHOD,
config.getId(),
currentConfigName,
syncConfig.getWriteableName()),

View File

@ -24,7 +24,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameIndexerPosition implements Writeable, ToXContentObject {
public class TransformIndexerPosition implements Writeable, ToXContentObject {
public static final String NAME = "data_frame/indexer_position";
public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
@ -34,21 +34,21 @@ public class DataFrameIndexerPosition implements Writeable, ToXContentObject {
private final Map<String, Object> bucketPosition;
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<DataFrameIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(NAME,
public static final ConstructingObjectParser<TransformIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(NAME,
true,
args -> new DataFrameIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
args -> new TransformIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
static {
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
}
public DataFrameIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
public TransformIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
}
public DataFrameIndexerPosition(StreamInput in) throws IOException {
public TransformIndexerPosition(StreamInput in) throws IOException {
Map<String, Object> position = in.readMap();
indexerPosition = position == null ? null : Collections.unmodifiableMap(position);
position = in.readMap();
@ -92,7 +92,7 @@ public class DataFrameIndexerPosition implements Writeable, ToXContentObject {
return false;
}
DataFrameIndexerPosition that = (DataFrameIndexerPosition) other;
TransformIndexerPosition that = (TransformIndexerPosition) other;
return Objects.equals(this.indexerPosition, that.indexerPosition) &&
Objects.equals(this.bucketPosition, that.bucketPosition);
@ -108,7 +108,7 @@ public class DataFrameIndexerPosition implements Writeable, ToXContentObject {
return Strings.toString(this);
}
public static DataFrameIndexerPosition fromXContent(XContentParser parser) {
public static TransformIndexerPosition fromXContent(XContentParser parser) {
try {
return PARSER.parse(parser, null);
} catch (IOException e) {

View File

@ -21,7 +21,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameIndexerTransformStats extends IndexerJobStats {
public class TransformIndexerStats extends IndexerJobStats {
private static final String DEFAULT_TRANSFORM_ID = "_all"; // TODO remove when no longer needed for wire BWC
@ -48,9 +48,9 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
private static final int EXP_AVG_WINDOW = 10;
private static final double ALPHA = 2.0/(EXP_AVG_WINDOW + 1);
private static final ConstructingObjectParser<DataFrameIndexerTransformStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
private static final ConstructingObjectParser<TransformIndexerStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
NAME, true,
args -> new DataFrameIndexerTransformStats(
args -> new TransformIndexerStats(
(long) args[0], (long) args[1], (long) args[2], (long) args[3], (long) args[4], (long) args[5], (long) args[6],
(long) args[7], (long) args[8], (long) args[9], (Double) args[10], (Double) args[11], (Double) args[12]));
@ -76,11 +76,11 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
/**
* Create with all stats set to zero
*/
public DataFrameIndexerTransformStats() {
public TransformIndexerStats() {
super();
}
public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOutputDocuments,
public TransformIndexerStats(long numPages, long numInputDocuments, long numOutputDocuments,
long numInvocations, long indexTime, long searchTime, long indexTotal, long searchTotal,
long indexFailures, long searchFailures, Double expAvgCheckpointDurationMs,
Double expAvgDocumentsIndexed, Double expAvgDocumentsProcessed ) {
@ -91,14 +91,14 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
this.expAvgDocumentsProcessed = expAvgDocumentsProcessed == null ? 0.0 : expAvgDocumentsProcessed;
}
public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOutputDocuments,
public TransformIndexerStats(long numPages, long numInputDocuments, long numOutputDocuments,
long numInvocations, long indexTime, long searchTime, long indexTotal, long searchTotal,
long indexFailures, long searchFailures) {
this(numPages, numInputDocuments, numOutputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal,
indexFailures, searchFailures, 0.0, 0.0, 0.0);
}
public DataFrameIndexerTransformStats(DataFrameIndexerTransformStats other) {
public TransformIndexerStats(TransformIndexerStats other) {
this(other.numPages, other.numInputDocuments, other.numOuputDocuments, other.numInvocations,
other.indexTime, other.searchTime, other.indexTotal, other.searchTotal, other.indexFailures, other.searchFailures);
this.expAvgCheckpointDurationMs = other.expAvgCheckpointDurationMs;
@ -106,7 +106,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
this.expAvgDocumentsProcessed = other.expAvgDocumentsProcessed;
}
public DataFrameIndexerTransformStats(StreamInput in) throws IOException {
public TransformIndexerStats(StreamInput in) throws IOException {
super(in);
if (in.getVersion().before(Version.V_7_4_0)) {
in.readString(); // was transformId
@ -193,7 +193,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
return false;
}
DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other;
TransformIndexerStats that = (TransformIndexerStats) other;
return Objects.equals(this.numPages, that.numPages)
&& Objects.equals(this.numInputDocuments, that.numInputDocuments)
@ -217,7 +217,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
expAvgCheckpointDurationMs, expAvgDocumentsIndexed, expAvgDocumentsProcessed);
}
public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) {
public static TransformIndexerStats fromXContent(XContentParser parser) {
try {
return LENIENT_PARSER.parse(parser, null);
} catch (IOException e) {

View File

@ -21,7 +21,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformProgress implements Writeable, ToXContentObject {
public class TransformProgress implements Writeable, ToXContentObject {
public static final ParseField TOTAL_DOCS = new ParseField("total_docs");
public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining");
@ -29,10 +29,10 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject {
public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed");
public static final String PERCENT_COMPLETE = "percent_complete";
public static final ConstructingObjectParser<DataFrameTransformProgress, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_progress",
true,
a -> new DataFrameTransformProgress((Long) a[0], (Long)a[1], (Long)a[2], (Long)a[3]));
a -> new TransformProgress((Long) a[0], (Long)a[1], (Long)a[2], (Long)a[3]));
static {
PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS);
@ -45,12 +45,12 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject {
private long documentsProcessed;
private long documentsIndexed;
public DataFrameTransformProgress() {
public TransformProgress() {
this(null, 0L, 0L);
}
// If we are reading from an old document we need to convert docsRemaining to docsProcessed
public DataFrameTransformProgress(Long totalDocs, Long docsRemaining, Long documentsProcessed, Long documentsIndexed) {
public TransformProgress(Long totalDocs, Long docsRemaining, Long documentsProcessed, Long documentsIndexed) {
this(totalDocs,
documentsProcessed != null ?
documentsProcessed :
@ -58,7 +58,7 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject {
documentsIndexed);
}
public DataFrameTransformProgress(Long totalDocs, Long documentsProcessed, Long documentsIndexed) {
public TransformProgress(Long totalDocs, Long documentsProcessed, Long documentsIndexed) {
if (totalDocs != null && totalDocs < 0) {
throw new IllegalArgumentException("[total_docs] must be >0.");
}
@ -73,13 +73,13 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject {
this.documentsIndexed = documentsIndexed == null ? 0 : documentsIndexed;
}
public DataFrameTransformProgress(DataFrameTransformProgress otherProgress) {
public TransformProgress(TransformProgress otherProgress) {
this.totalDocs = otherProgress.totalDocs;
this.documentsProcessed = otherProgress.documentsProcessed;
this.documentsIndexed = otherProgress.documentsIndexed;
}
public DataFrameTransformProgress(StreamInput in) throws IOException {
public TransformProgress(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
this.totalDocs = in.readOptionalLong();
this.documentsProcessed = in.readVLong();
@ -135,7 +135,7 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject {
return false;
}
DataFrameTransformProgress that = (DataFrameTransformProgress) other;
TransformProgress that = (TransformProgress) other;
return Objects.equals(this.documentsIndexed, that.documentsIndexed)
&& Objects.equals(this.totalDocs, that.totalDocs)
&& Objects.equals(this.documentsProcessed, that.documentsProcessed);

View File

@ -19,7 +19,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.persistent.PersistentTaskState;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Map;
@ -28,16 +28,16 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformState implements Task.Status, PersistentTaskState {
public static final String NAME = DataFrameField.TASK_NAME;
public class TransformState implements Task.Status, PersistentTaskState {
public static final String NAME = TransformField.TASK_NAME;
private final DataFrameTransformTaskState taskState;
private final TransformTaskState taskState;
private final IndexerState indexerState;
private final DataFrameTransformProgress progress;
private final TransformProgress progress;
private final long checkpoint;
@Nullable
private final DataFrameIndexerPosition position;
private final TransformIndexerPosition position;
@Nullable
private final String reason;
@Nullable
@ -55,44 +55,44 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
public static final ParseField NODE = new ParseField("node");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<DataFrameTransformState, Void> PARSER = new ConstructingObjectParser<>(NAME,
public static final ConstructingObjectParser<TransformState, Void> PARSER = new ConstructingObjectParser<>(NAME,
true,
args -> {
DataFrameTransformTaskState taskState = (DataFrameTransformTaskState) args[0];
TransformTaskState taskState = (TransformTaskState) args[0];
IndexerState indexerState = (IndexerState) args[1];
Map<String, Object> bwcCurrentPosition = (Map<String, Object>) args[2];
DataFrameIndexerPosition dataFrameIndexerPosition = (DataFrameIndexerPosition) args[3];
TransformIndexerPosition transformIndexerPosition = (TransformIndexerPosition) args[3];
// BWC handling, translate current_position to position iff position isn't set
if (bwcCurrentPosition != null && dataFrameIndexerPosition == null) {
dataFrameIndexerPosition = new DataFrameIndexerPosition(bwcCurrentPosition, null);
if (bwcCurrentPosition != null && transformIndexerPosition == null) {
transformIndexerPosition = new TransformIndexerPosition(bwcCurrentPosition, null);
}
long checkpoint = (long) args[4];
String reason = (String) args[5];
DataFrameTransformProgress progress = (DataFrameTransformProgress) args[6];
TransformProgress progress = (TransformProgress) args[6];
NodeAttributes node = (NodeAttributes) args[7];
return new DataFrameTransformState(taskState, indexerState, dataFrameIndexerPosition, checkpoint, reason, progress, node);
return new TransformState(taskState, indexerState, transformIndexerPosition, checkpoint, reason, progress, node);
});
static {
PARSER.declareField(constructorArg(), p -> DataFrameTransformTaskState.fromString(p.text()), TASK_STATE, ValueType.STRING);
PARSER.declareField(constructorArg(), p -> TransformTaskState.fromString(p.text()), TASK_STATE, ValueType.STRING);
PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), INDEXER_STATE, ValueType.STRING);
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, CURRENT_POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), DataFrameIndexerPosition::fromXContent, POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), TransformIndexerPosition::fromXContent, POSITION, ValueType.OBJECT);
PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CHECKPOINT);
PARSER.declareString(optionalConstructorArg(), REASON);
PARSER.declareField(optionalConstructorArg(), DataFrameTransformProgress.PARSER::apply, PROGRESS, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), TransformProgress.PARSER::apply, PROGRESS, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE, ValueType.OBJECT);
}
public DataFrameTransformState(DataFrameTransformTaskState taskState,
public TransformState(TransformTaskState taskState,
IndexerState indexerState,
@Nullable DataFrameIndexerPosition position,
@Nullable TransformIndexerPosition position,
long checkpoint,
@Nullable String reason,
@Nullable DataFrameTransformProgress progress,
@Nullable TransformProgress progress,
@Nullable NodeAttributes node) {
this.taskState = taskState;
this.indexerState = indexerState;
@ -103,27 +103,27 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
this.node = node;
}
public DataFrameTransformState(DataFrameTransformTaskState taskState,
public TransformState(TransformTaskState taskState,
IndexerState indexerState,
@Nullable DataFrameIndexerPosition position,
@Nullable TransformIndexerPosition position,
long checkpoint,
@Nullable String reason,
@Nullable DataFrameTransformProgress progress) {
@Nullable TransformProgress progress) {
this(taskState, indexerState, position, checkpoint, reason, progress, null);
}
public DataFrameTransformState(StreamInput in) throws IOException {
taskState = DataFrameTransformTaskState.fromStream(in);
public TransformState(StreamInput in) throws IOException {
taskState = TransformTaskState.fromStream(in);
indexerState = IndexerState.fromStream(in);
if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
position = in.readOptionalWriteable(DataFrameIndexerPosition::new);
position = in.readOptionalWriteable(TransformIndexerPosition::new);
} else {
Map<String, Object> pos = in.readMap();
position = new DataFrameIndexerPosition(pos, null);
position = new TransformIndexerPosition(pos, null);
}
checkpoint = in.readLong();
reason = in.readOptionalString();
progress = in.readOptionalWriteable(DataFrameTransformProgress::new);
progress = in.readOptionalWriteable(TransformProgress::new);
if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
node = in.readOptionalWriteable(NodeAttributes::new);
} else {
@ -131,7 +131,7 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
}
}
public DataFrameTransformTaskState getTaskState() {
public TransformTaskState getTaskState() {
return taskState;
}
@ -139,7 +139,7 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
return indexerState;
}
public DataFrameIndexerPosition getPosition() {
public TransformIndexerPosition getPosition() {
return position;
}
@ -147,7 +147,7 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
return checkpoint;
}
public DataFrameTransformProgress getProgress() {
public TransformProgress getProgress() {
return progress;
}
@ -159,12 +159,12 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
return node;
}
public DataFrameTransformState setNode(NodeAttributes node) {
public TransformState setNode(NodeAttributes node) {
this.node = node;
return this;
}
public static DataFrameTransformState fromXContent(XContentParser parser) {
public static TransformState fromXContent(XContentParser parser) {
try {
return PARSER.parse(parser, null);
} catch (IOException e) {
@ -226,7 +226,7 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState
return false;
}
DataFrameTransformState that = (DataFrameTransformState) other;
TransformState that = (TransformState) other;
return Objects.equals(this.taskState, that.taskState) &&
Objects.equals(this.indexerState, that.indexerState) &&

View File

@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Locale;
@ -34,7 +34,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
* Objects of this class are expected to be ephemeral.
* Do not persist objects of this class to cluster state or an index.
*/
public class DataFrameTransformStats implements Writeable, ToXContentObject {
public class TransformStats implements Writeable, ToXContentObject {
public static final String NAME = "data_frame_transform_stats";
public static final ParseField STATE_FIELD = new ParseField("state");
@ -48,52 +48,52 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
private final String reason;
@Nullable
private NodeAttributes node;
private final DataFrameIndexerTransformStats indexerStats;
private final DataFrameTransformCheckpointingInfo checkpointingInfo;
private final TransformIndexerStats indexerStats;
private final TransformCheckpointingInfo checkpointingInfo;
public static final ConstructingObjectParser<DataFrameTransformStats, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<TransformStats, Void> PARSER = new ConstructingObjectParser<>(
NAME,
true,
a -> new DataFrameTransformStats((String) a[0],
a -> new TransformStats((String) a[0],
(State) a[1],
(String) a[2],
(NodeAttributes) a[3],
(DataFrameIndexerTransformStats) a[4],
(DataFrameTransformCheckpointingInfo) a[5]));
(TransformIndexerStats) a[4],
(TransformCheckpointingInfo) a[5]));
static {
PARSER.declareString(constructorArg(), DataFrameField.ID);
PARSER.declareField(constructorArg(), p -> DataFrameTransformStats.State.fromString(p.text()), STATE_FIELD,
PARSER.declareString(constructorArg(), TransformField.ID);
PARSER.declareField(constructorArg(), p -> TransformStats.State.fromString(p.text()), STATE_FIELD,
ObjectParser.ValueType.STRING);
PARSER.declareString(optionalConstructorArg(), REASON_FIELD);
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT);
PARSER.declareObject(constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p),
DataFrameField.STATS_FIELD);
PARSER.declareObject(constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p),
TransformField.STATS_FIELD);
PARSER.declareObject(constructorArg(),
(p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
(p, c) -> TransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
}
public static DataFrameTransformStats fromXContent(XContentParser parser) throws IOException {
public static TransformStats fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public static DataFrameTransformStats initialStats(String id) {
return stoppedStats(id, new DataFrameIndexerTransformStats());
public static TransformStats initialStats(String id) {
return stoppedStats(id, new TransformIndexerStats());
}
public static DataFrameTransformStats stoppedStats(String id, DataFrameIndexerTransformStats indexerTransformStats) {
return new DataFrameTransformStats(id,
public static TransformStats stoppedStats(String id, TransformIndexerStats indexerTransformStats) {
return new TransformStats(id,
State.STOPPED,
null,
null,
indexerTransformStats,
DataFrameTransformCheckpointingInfo.EMPTY);
TransformCheckpointingInfo.EMPTY);
}
public DataFrameTransformStats(String id, State state, @Nullable String reason,
@Nullable NodeAttributes node, DataFrameIndexerTransformStats stats,
DataFrameTransformCheckpointingInfo checkpointingInfo) {
public TransformStats(String id, State state, @Nullable String reason,
@Nullable NodeAttributes node, TransformIndexerStats stats,
TransformCheckpointingInfo checkpointingInfo) {
this.id = Objects.requireNonNull(id);
this.state = Objects.requireNonNull(state);
this.reason = reason;
@ -102,7 +102,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
this.checkpointingInfo = Objects.requireNonNull(checkpointingInfo);
}
public DataFrameTransformStats(StreamInput in) throws IOException {
public TransformStats(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
this.id = in.readString();
this.state = in.readEnum(State.class);
@ -112,27 +112,27 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
} else {
this.node = null;
}
this.indexerStats = new DataFrameIndexerTransformStats(in);
this.checkpointingInfo = new DataFrameTransformCheckpointingInfo(in);
this.indexerStats = new TransformIndexerStats(in);
this.checkpointingInfo = new TransformCheckpointingInfo(in);
} else {
// Prior to version 7.4 DataFrameTransformStats didn't exist, and we have
// to do the best we can of reading from a DataFrameTransformStoredDoc object
// (which is called DataFrameTransformStateAndStats in 7.2/7.3)
this.id = in.readString();
DataFrameTransformState transformState = new DataFrameTransformState(in);
TransformState transformState = new TransformState(in);
this.state = State.fromComponents(transformState.getTaskState(), transformState.getIndexerState());
this.reason = transformState.getReason();
this.node = null;
this.indexerStats = new DataFrameIndexerTransformStats(in);
this.checkpointingInfo = new DataFrameTransformCheckpointingInfo(in);
this.indexerStats = new TransformIndexerStats(in);
this.checkpointingInfo = new TransformCheckpointingInfo(in);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.ID.getPreferredName(), id);
builder.field(TransformField.ID.getPreferredName(), id);
builder.field(STATE_FIELD.getPreferredName(), state.value());
if (reason != null) {
builder.field(REASON_FIELD.getPreferredName(), reason);
@ -140,7 +140,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
if (node != null) {
builder.field(NODE_FIELD.getPreferredName(), node);
}
builder.field(DataFrameField.STATS_FIELD.getPreferredName(), indexerStats, params);
builder.field(TransformField.STATS_FIELD.getPreferredName(), indexerStats, params);
builder.field(CHECKPOINTING_INFO_FIELD.getPreferredName(), checkpointingInfo, params);
builder.endObject();
return builder;
@ -165,8 +165,8 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
// to do the best we can of writing to a DataFrameTransformStoredDoc object
// (which is called DataFrameTransformStateAndStats in 7.2/7.3)
out.writeString(id);
Tuple<DataFrameTransformTaskState, IndexerState> stateComponents = state.toComponents();
new DataFrameTransformState(stateComponents.v1(),
Tuple<TransformTaskState, IndexerState> stateComponents = state.toComponents();
new TransformState(stateComponents.v1(),
stateComponents.v2(),
checkpointingInfo.getNext().getPosition(),
checkpointingInfo.getLast().getCheckpoint(),
@ -193,7 +193,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
return false;
}
DataFrameTransformStats that = (DataFrameTransformStats) other;
TransformStats that = (TransformStats) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.state, that.state)
@ -225,11 +225,11 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
this.node = node;
}
public DataFrameIndexerTransformStats getIndexerStats() {
public TransformIndexerStats getIndexerStats() {
return indexerStats;
}
public DataFrameTransformCheckpointingInfo getCheckpointingInfo() {
public TransformCheckpointingInfo getCheckpointingInfo() {
return checkpointingInfo;
}
@ -250,16 +250,16 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
return in.readEnum(State.class);
}
public static State fromComponents(DataFrameTransformTaskState taskState, IndexerState indexerState) {
public static State fromComponents(TransformTaskState taskState, IndexerState indexerState) {
if (taskState == null || taskState == DataFrameTransformTaskState.STOPPED) {
if (taskState == null || taskState == TransformTaskState.STOPPED) {
return STOPPED;
} else if (taskState == DataFrameTransformTaskState.FAILED) {
} else if (taskState == TransformTaskState.FAILED) {
return FAILED;
} else {
// If we get here then the task state must be started, and that means we should have an indexer state
assert(taskState == DataFrameTransformTaskState.STARTED);
assert(taskState == TransformTaskState.STARTED);
assert(indexerState != null);
switch (indexerState) {
@ -288,25 +288,25 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject {
return name().toLowerCase(Locale.ROOT);
}
public Tuple<DataFrameTransformTaskState, IndexerState> toComponents() {
public Tuple<TransformTaskState, IndexerState> toComponents() {
switch (this) {
case STARTED:
return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.STARTED);
return new Tuple<>(TransformTaskState.STARTED, IndexerState.STARTED);
case INDEXING:
return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.INDEXING);
return new Tuple<>(TransformTaskState.STARTED, IndexerState.INDEXING);
case ABORTING:
return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.ABORTING);
return new Tuple<>(TransformTaskState.STARTED, IndexerState.ABORTING);
case STOPPING:
return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.STOPPING);
return new Tuple<>(TransformTaskState.STARTED, IndexerState.STOPPING);
case STOPPED:
// This one is not deterministic, because an overall state of STOPPED could arise
// from either (STOPPED, null) or (STARTED, STOPPED). However, (STARTED, STOPPED)
// is a very short-lived state so it's reasonable to assume the other, especially
// as this method is only for mixed version cluster compatibility.
return new Tuple<>(DataFrameTransformTaskState.STOPPED, null);
return new Tuple<>(TransformTaskState.STOPPED, null);
case FAILED:
return new Tuple<>(DataFrameTransformTaskState.FAILED, null);
return new Tuple<>(TransformTaskState.FAILED, null);
default:
throw new IllegalStateException("Unexpected state enum value: " + this);
}

View File

@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Objects;
@ -25,34 +25,34 @@ import java.util.Objects;
* A wrapper for grouping transform state and stats when persisting to an index.
* Not intended to be returned in endpoint responses.
*/
public class DataFrameTransformStoredDoc implements Writeable, ToXContentObject {
public class TransformStoredDoc implements Writeable, ToXContentObject {
public static final String NAME = "data_frame_transform_state_and_stats";
public static final ParseField STATE_FIELD = new ParseField("state");
private final String id;
private final DataFrameTransformState transformState;
private final DataFrameIndexerTransformStats transformStats;
private final TransformState transformState;
private final TransformIndexerStats transformStats;
public static final ConstructingObjectParser<DataFrameTransformStoredDoc, Void> PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<TransformStoredDoc, Void> PARSER = new ConstructingObjectParser<>(
NAME, true,
a -> new DataFrameTransformStoredDoc((String) a[0],
(DataFrameTransformState) a[1],
(DataFrameIndexerTransformStats) a[2]));
a -> new TransformStoredDoc((String) a[0],
(TransformState) a[1],
(TransformIndexerStats) a[2]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameTransformState.PARSER::apply, STATE_FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p),
DataFrameField.STATS_FIELD);
PARSER.declareString(ConstructingObjectParser.constructorArg(), TransformField.ID);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), TransformState.PARSER::apply, STATE_FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p),
TransformField.STATS_FIELD);
}
public static DataFrameTransformStoredDoc fromXContent(XContentParser parser) throws IOException {
public static TransformStoredDoc fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
/**
* Get the persisted state and stats document name from the Data Frame Transform Id.
* Get the persisted state and stats document name from the Transform Id.
*
* @return The id of document the where the transform stats are persisted
*/
@ -60,28 +60,28 @@ public class DataFrameTransformStoredDoc implements Writeable, ToXContentObject
return NAME + "-" + transformId;
}
public DataFrameTransformStoredDoc(String id, DataFrameTransformState state, DataFrameIndexerTransformStats stats) {
public TransformStoredDoc(String id, TransformState state, TransformIndexerStats stats) {
this.id = Objects.requireNonNull(id);
this.transformState = Objects.requireNonNull(state);
this.transformStats = Objects.requireNonNull(stats);
}
public DataFrameTransformStoredDoc(StreamInput in) throws IOException {
public TransformStoredDoc(StreamInput in) throws IOException {
this.id = in.readString();
this.transformState = new DataFrameTransformState(in);
this.transformStats = new DataFrameIndexerTransformStats(in);
this.transformState = new TransformState(in);
this.transformStats = new TransformIndexerStats(in);
if (in.getVersion().before(Version.V_7_4_0)) {
new DataFrameTransformCheckpointingInfo(in);
new TransformCheckpointingInfo(in);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.ID.getPreferredName(), id);
builder.field(TransformField.ID.getPreferredName(), id);
builder.field(STATE_FIELD.getPreferredName(), transformState, params);
builder.field(DataFrameField.STATS_FIELD.getPreferredName(), transformStats, params);
builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME);
builder.field(TransformField.STATS_FIELD.getPreferredName(), transformStats, params);
builder.field(TransformField.INDEX_DOC_TYPE.getPreferredName(), NAME);
builder.endObject();
return builder;
}
@ -92,7 +92,7 @@ public class DataFrameTransformStoredDoc implements Writeable, ToXContentObject
transformState.writeTo(out);
transformStats.writeTo(out);
if (out.getVersion().before(Version.V_7_4_0)) {
DataFrameTransformCheckpointingInfo.EMPTY.writeTo(out);
TransformCheckpointingInfo.EMPTY.writeTo(out);
}
}
@ -111,7 +111,7 @@ public class DataFrameTransformStoredDoc implements Writeable, ToXContentObject
return false;
}
DataFrameTransformStoredDoc that = (DataFrameTransformStoredDoc) other;
TransformStoredDoc that = (TransformStoredDoc) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.transformState, that.transformState)
@ -122,11 +122,11 @@ public class DataFrameTransformStoredDoc implements Writeable, ToXContentObject
return id;
}
public DataFrameIndexerTransformStats getTransformStats() {
public TransformIndexerStats getTransformStats() {
return transformStats;
}
public DataFrameTransformState getTransformState() {
public TransformState getTransformState() {
return transformState;
}

View File

@ -13,20 +13,20 @@ import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import java.util.Locale;
public enum DataFrameTransformTaskState implements Writeable {
public enum TransformTaskState implements Writeable {
STOPPED, STARTED, FAILED;
public static DataFrameTransformTaskState fromString(String name) {
public static TransformTaskState fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
public static DataFrameTransformTaskState fromStream(StreamInput in) throws IOException {
return in.readEnum(DataFrameTransformTaskState.class);
public static TransformTaskState fromStream(StreamInput in) throws IOException {
return in.readEnum(TransformTaskState.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
DataFrameTransformTaskState state = this;
TransformTaskState state = this;
out.writeEnum(state);
}

View File

@ -22,7 +22,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import java.io.IOException;
import java.util.Collection;
@ -78,9 +78,9 @@ public class AggregationConfig implements Writeable, ToXContentObject {
if (source.isEmpty()) {
if (lenient) {
logger.warn(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION);
logger.warn(TransformMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION);
} else {
throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION);
throw new IllegalArgumentException(TransformMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION);
}
} else {
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source);
@ -90,7 +90,7 @@ public class AggregationConfig implements Writeable, ToXContentObject {
aggregations = AggregatorFactories.parseAggregators(sourceParser);
} catch (Exception e) {
if (lenient) {
logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION, e);
logger.warn(TransformMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION, e);
} else {
throw e;
}

View File

@ -21,8 +21,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameMessages;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
@ -45,7 +45,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
private final Map<String, SingleGroupSource> groups;
public GroupConfig(final Map<String, Object> source, final Map<String, SingleGroupSource> groups) {
this.source = ExceptionsHelper.requireNonNull(source, DataFrameField.GROUP_BY.getPreferredName());
this.source = ExceptionsHelper.requireNonNull(source, TransformField.GROUP_BY.getPreferredName());
this.groups = groups;
}
@ -115,9 +115,9 @@ public class GroupConfig implements Writeable, ToXContentObject {
if (source.isEmpty()) {
if (lenient) {
logger.warn(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY);
logger.warn(TransformMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY);
} else {
throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY);
throw new IllegalArgumentException(TransformMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY);
}
} else {
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source);
@ -126,7 +126,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
groups = parseGroupConfig(sourceParser, lenient);
} catch (Exception e) {
if (lenient) {
logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_GROUP_BY, e);
logger.warn(TransformMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_GROUP_BY, e);
} else {
throw e;
}

View File

@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper;
import java.io.IOException;
@ -67,18 +67,18 @@ public class PivotConfig implements Writeable, ToXContentObject {
});
parser.declareObject(constructorArg(),
(p, c) -> (GroupConfig.fromXContent(p, lenient)), DataFrameField.GROUP_BY);
(p, c) -> (GroupConfig.fromXContent(p, lenient)), TransformField.GROUP_BY);
parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGREGATIONS);
parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGS);
parser.declareInt(optionalConstructorArg(), DataFrameField.MAX_PAGE_SEARCH_SIZE);
parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), TransformField.AGGREGATIONS);
parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), TransformField.AGGS);
parser.declareInt(optionalConstructorArg(), TransformField.MAX_PAGE_SEARCH_SIZE);
return parser;
}
public PivotConfig(final GroupConfig groups, final AggregationConfig aggregationConfig, Integer maxPageSearchSize) {
this.groups = ExceptionsHelper.requireNonNull(groups, DataFrameField.GROUP_BY.getPreferredName());
this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, DataFrameField.AGGREGATIONS.getPreferredName());
this.groups = ExceptionsHelper.requireNonNull(groups, TransformField.GROUP_BY.getPreferredName());
this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, TransformField.AGGREGATIONS.getPreferredName());
this.maxPageSearchSize = maxPageSearchSize;
}
@ -91,10 +91,10 @@ public class PivotConfig implements Writeable, ToXContentObject {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DataFrameField.GROUP_BY.getPreferredName(), groups);
builder.field(DataFrameField.AGGREGATIONS.getPreferredName(), aggregationConfig);
builder.field(TransformField.GROUP_BY.getPreferredName(), groups);
builder.field(TransformField.AGGREGATIONS.getPreferredName(), aggregationConfig);
if (maxPageSearchSize != null) {
builder.field(DataFrameField.MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize);
builder.field(TransformField.MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize);
}
builder.endObject();
return builder;

View File

@ -12,7 +12,7 @@ import java.util.regex.Pattern;
/**
* Yet Another String utilities class.
*/
public final class DataFrameStrings {
public final class TransformStrings {
/**
* Valid user id pattern.
@ -24,7 +24,7 @@ public final class DataFrameStrings {
public static final int ID_LENGTH_LIMIT = 64;
private DataFrameStrings() {
private TransformStrings() {
}
public static boolean isValidId(String id) {

View File

@ -132,13 +132,13 @@ import org.elasticsearch.xpack.core.security.user.LogstashSystemUser;
import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser;
import org.elasticsearch.xpack.core.security.user.SystemUser;
import org.elasticsearch.xpack.core.security.user.XPackUser;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartTransformAction;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction;
import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField;
import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField;
import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction;
@ -1124,13 +1124,13 @@ public class ReservedRolesStoreTests extends ESTestCase {
assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true));
Role role = Role.builder(roleDescriptor, null).build();
assertThat(role.cluster().check(DeleteDataFrameTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetDataFrameTransformsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetDataFrameTransformsStatsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PreviewDataFrameTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PutDataFrameTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(StartDataFrameTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(StopDataFrameTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false));
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
@ -1163,13 +1163,13 @@ public class ReservedRolesStoreTests extends ESTestCase {
assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true));
Role role = Role.builder(roleDescriptor, null).build();
assertThat(role.cluster().check(DeleteDataFrameTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(GetDataFrameTransformsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetDataFrameTransformsStatsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PreviewDataFrameTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(PutDataFrameTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(StartDataFrameTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(StopDataFrameTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true));
assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(false));
assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false));
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));

View File

@ -9,28 +9,28 @@ package org.elasticsearch.xpack.core.transform;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStatsTests;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStatsTests;
import java.util.HashMap;
import java.util.Map;
public class DataFrameFeatureSetUsageTests extends AbstractWireSerializingTestCase<DataFrameFeatureSetUsage> {
public class TransformFeatureSetUsageTests extends AbstractWireSerializingTestCase<TransformFeatureSetUsage> {
@Override
protected DataFrameFeatureSetUsage createTestInstance() {
protected TransformFeatureSetUsage createTestInstance() {
Map<String, Long> transformCountByState = new HashMap<>();
if (randomBoolean()) {
transformCountByState.put(randomFrom(IndexerState.values()).toString(), randomLong());
}
return new DataFrameFeatureSetUsage(randomBoolean(), randomBoolean(), transformCountByState,
DataFrameIndexerTransformStatsTests.randomStats());
return new TransformFeatureSetUsage(randomBoolean(), randomBoolean(), transformCountByState,
TransformIndexerStatsTests.randomStats());
}
@Override
protected Reader<DataFrameFeatureSetUsage> instanceReader() {
return DataFrameFeatureSetUsage::new;
protected Reader<TransformFeatureSetUsage> instanceReader() {
return TransformFeatureSetUsage::new;
}
}

View File

@ -14,16 +14,16 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
public class DataFrameMessagesTests extends ESTestCase {
public class TransformMessagesTests extends ESTestCase {
public void testGetMessage_WithFormatStrings() {
String formattedMessage = DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT, "30s",
String formattedMessage = TransformMessages.getMessage(TransformMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT, "30s",
"my_transform");
assertEquals("Timed out after [30s] while waiting for data frame transform [my_transform] to stop", formattedMessage);
}
public void testMessageProperFormat() throws IllegalArgumentException, IllegalAccessException {
Field[] declaredFields = DataFrameMessages.class.getFields();
Field[] declaredFields = TransformMessages.class.getFields();
int checkedMessages = 0;
for (Field field : declaredFields) {
@ -31,7 +31,7 @@ public class DataFrameMessagesTests extends ESTestCase {
if (java.lang.reflect.Modifier.isStatic(modifiers) && java.lang.reflect.Modifier.isFinal(modifiers)
&& field.getType().isAssignableFrom(String.class)) {
assertSingleMessage((String) field.get(DataFrameMessages.class));
assertSingleMessage((String) field.get(TransformMessages.class));
++checkedMessages;
}
}

View File

@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.transforms.SyncConfig;
import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
import org.junit.Before;
@ -23,7 +23,7 @@ import java.util.List;
import static java.util.Collections.emptyList;
public abstract class AbstractSerializingDataFrameTestCase<T extends ToXContent & Writeable>
public abstract class AbstractSerializingTransformTestCase<T extends ToXContent & Writeable>
extends AbstractSerializingTestCase<T> {
private NamedWriteableRegistry namedWriteableRegistry;
@ -34,11 +34,11 @@ public abstract class AbstractSerializingDataFrameTestCase<T extends ToXContent
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(),
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, TransformField.TIME_BASED_SYNC.getPreferredName(),
TimeSyncConfig::new));
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
namedXContentRegistry = new NamedXContentRegistry(namedXContents);

View File

@ -12,8 +12,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.transforms.SyncConfig;
import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
import org.junit.Before;
@ -22,7 +22,7 @@ import java.util.List;
import static java.util.Collections.emptyList;
public abstract class AbstractWireSerializingDataFrameTestCase<T extends Writeable> extends AbstractWireSerializingTestCase<T> {
public abstract class AbstractWireSerializingTransformTestCase<T extends Writeable> extends AbstractWireSerializingTestCase<T> {
/**
* Test case that ensures aggregation named objects are registered
*/
@ -34,11 +34,11 @@ public abstract class AbstractWireSerializingDataFrameTestCase<T extends Writeab
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(),
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, TransformField.TIME_BASED_SYNC.getPreferredName(),
TimeSyncConfig::new));
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
namedXContentRegistry = new NamedXContentRegistry(namedXContents);

View File

@ -8,9 +8,9 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction.Request;
public class StartDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class DeleteTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {
return new Request(randomAlphaOfLengthBetween(1, 20), randomBoolean());

View File

@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsAction.Request;
import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Request;
public class GetDataFrameTransformsActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class GetTransformsActionRequestTests extends AbstractWireSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {

View File

@ -12,9 +12,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigTests;
import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests;
import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent;
import java.io.IOException;
@ -22,15 +22,15 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class GetDataFrameTransformsActionResponseTests extends AbstractWireSerializingDataFrameTestCase<Response> {
public class GetTransformsActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> {
public void testInvalidTransforms() throws IOException {
List<DataFrameTransformConfig> transforms = new ArrayList<>();
List<TransformConfig> transforms = new ArrayList<>();
transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
transforms.add(DataFrameTransformConfigTests.randomInvalidDataFrameTransformConfig());
transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
transforms.add(DataFrameTransformConfigTests.randomInvalidDataFrameTransformConfig());
transforms.add(TransformConfigTests.randomDataFrameTransformConfig());
transforms.add(TransformConfigTests.randomInvalidDataFrameTransformConfig());
transforms.add(TransformConfigTests.randomDataFrameTransformConfig());
transforms.add(TransformConfigTests.randomInvalidDataFrameTransformConfig());
Response r = new Response(transforms, transforms.size());
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
@ -46,10 +46,10 @@ public class GetDataFrameTransformsActionResponseTests extends AbstractWireSeria
@SuppressWarnings("unchecked")
public void testNoHeaderInResponse() throws IOException {
List<DataFrameTransformConfig> transforms = new ArrayList<>();
List<TransformConfig> transforms = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 10); ++i) {
transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
transforms.add(TransformConfigTests.randomDataFrameTransformConfig());
}
Response r = new Response(transforms, transforms.size());
@ -71,9 +71,9 @@ public class GetDataFrameTransformsActionResponseTests extends AbstractWireSeria
@Override
protected Response createTestInstance() {
List<DataFrameTransformConfig> configs = new ArrayList<>();
List<TransformConfig> configs = new ArrayList<>();
for (int i = 0; i < randomInt(10); ++i) {
configs.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
configs.add(TransformConfigTests.randomDataFrameTransformConfig());
}
return new Response(configs, randomNonNegativeLong());

View File

@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsStatsAction.Request;
import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Request;
public class GetDataFrameTransformsStatsActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class GetTransformsStatsActionRequestTests extends AbstractWireSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {
if (randomBoolean()) {

View File

@ -10,20 +10,20 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsStatsAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStatsTests;
import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformStatsTests;
import java.util.ArrayList;
import java.util.List;
public class GetDataFrameTransformsStatsActionResponseTests extends AbstractWireSerializingDataFrameTestCase<Response> {
public class GetTransformsStatsActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> {
@Override
protected Response createTestInstance() {
List<DataFrameTransformStats> stats = new ArrayList<>();
List<TransformStats> stats = new ArrayList<>();
int totalStats = randomInt(10);
for (int i = 0; i < totalStats; ++i) {
stats.add(DataFrameTransformStatsTests.randomDataFrameTransformStats());
stats.add(TransformStatsTests.randomDataFrameTransformStats());
}
int totalErrors = randomInt(10);
List<TaskOperationFailure> taskFailures = new ArrayList<>(totalErrors);

View File

@ -11,9 +11,9 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigTests;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Request;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfigTests;
@ -21,7 +21,7 @@ import java.io.IOException;
import static org.elasticsearch.xpack.core.transform.transforms.SourceConfigTests.randomSourceConfig;
public class PreviewDataFrameTransformActionRequestTests extends AbstractSerializingDataFrameTestCase<Request> {
public class PreviewTransformActionRequestTests extends AbstractSerializingTransformTestCase<Request> {
@Override
protected Request doParseInstance(XContentParser parser) throws IOException {
@ -40,12 +40,12 @@ public class PreviewDataFrameTransformActionRequestTests extends AbstractSeriali
@Override
protected Request createTestInstance() {
DataFrameTransformConfig config = new DataFrameTransformConfig(
TransformConfig config = new TransformConfig(
"transform-preview",
randomSourceConfig(),
new DestConfig("unused-transform-preview-index", null),
null,
randomBoolean() ? DataFrameTransformConfigTests.randomSyncConfig() : null,
randomBoolean() ? TransformConfigTests.randomSyncConfig() : null,
null,
PivotConfigTests.randomPivotConfig(),
null);

View File

@ -9,7 +9,7 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction.Response;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response;
import java.io.IOException;
import java.util.ArrayList;
@ -18,7 +18,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class PreviewDataFrameTransformsActionResponseTests extends AbstractSerializingTestCase<Response> {
public class PreviewTransformsActionResponseTests extends AbstractSerializingTestCase<Response> {
@Override

View File

@ -7,14 +7,14 @@
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction.Response;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class PreviewDataFrameTransformsActionResponseWireTests extends AbstractWireSerializingDataFrameTestCase<Response> {
public class PreviewTransformsActionResponseWireTests extends AbstractWireSerializingTransformTestCase<Response> {
@Override
protected Response createTestInstance() {

View File

@ -11,10 +11,10 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.action.PutDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigTests;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.action.PutTransformAction.Request;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests;
import org.elasticsearch.xpack.core.transform.transforms.SyncConfig;
import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
import org.junit.Before;
@ -23,7 +23,7 @@ import java.util.List;
import static java.util.Collections.emptyList;
public class PutDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class PutTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
private String transformId;
@Before
@ -38,7 +38,7 @@ public class PutDataFrameTransformActionRequestTests extends AbstractWireSeriali
@Override
protected Request createTestInstance() {
DataFrameTransformConfig config = DataFrameTransformConfigTests.randomDataFrameTransformConfigWithoutHeaders(transformId);
TransformConfig config = TransformConfigTests.randomDataFrameTransformConfigWithoutHeaders(transformId);
return new Request(config, randomBoolean());
}
@ -47,7 +47,7 @@ public class PutDataFrameTransformActionRequestTests extends AbstractWireSeriali
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(),
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, TransformField.TIME_BASED_SYNC.getPreferredName(),
TimeSyncConfig::new));
return new NamedWriteableRegistry(namedWriteables);
}

View File

@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
/**
 * Wire-serialization round-trip tests for {@code StartDataFrameTransformTaskAction.Request}:
 * the base class serializes {@link #createTestInstance()} and reads it back via
 * {@link #instanceReader()}, asserting equality of the round-tripped instance.
 */
public class StartDataFrameTransformTaskActionRequestTests extends
AbstractWireSerializingTestCase<StartDataFrameTransformTaskAction.Request> {
@Override
protected StartDataFrameTransformTaskAction.Request createTestInstance() {
// Random 4-char transform id plus a random boolean flag — presumably a
// "force" option; NOTE(review): confirm against the Request constructor.
return new StartDataFrameTransformTaskAction.Request(randomAlphaOfLength(4), randomBoolean());
}
@Override
protected Writeable.Reader<StartDataFrameTransformTaskAction.Request> instanceReader() {
// Stream constructor used by the framework to deserialize the instance.
return StartDataFrameTransformTaskAction.Request::new;
}
}

View File

@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
/**
 * Wire-serialization round-trip tests for {@code StartDataFrameTransformTaskAction.Response}:
 * the base class serializes {@link #createTestInstance()} and reads it back via
 * {@link #instanceReader()}, asserting equality of the round-tripped instance.
 */
public class StartDataFrameTransformTaskActionResponseTests extends
AbstractWireSerializingTestCase<StartDataFrameTransformTaskAction.Response> {
@Override
protected StartDataFrameTransformTaskAction.Response createTestInstance() {
// Response carries a single boolean — presumably an "acknowledged"/"started"
// flag; NOTE(review): confirm against the Response constructor.
return new StartDataFrameTransformTaskAction.Response(randomBoolean());
}
@Override
protected Writeable.Reader<StartDataFrameTransformTaskAction.Response> instanceReader() {
// Stream constructor used by the framework to deserialize the instance.
return StartDataFrameTransformTaskAction.Response::new;
}
}

View File

@ -8,9 +8,9 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.action.StartTransformAction.Request;
public class DeleteDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class StartTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {
return new Request(randomAlphaOfLengthBetween(1, 20), randomBoolean());

View File

@ -7,9 +7,9 @@
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction.Response;
import org.elasticsearch.xpack.core.transform.action.StartTransformAction.Response;
public class StopDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase<Response> {
public class StartTransformActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> {
@Override
protected Response createTestInstance() {

View File

@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
/**
 * Wire-serialization round-trip tests for {@code StartTransformTaskAction.Request}
 * (renamed from the "data frame" variant): the base class serializes
 * {@link #createTestInstance()} and reads it back via {@link #instanceReader()},
 * asserting equality of the round-tripped instance.
 */
public class StartTransformTaskActionRequestTests extends
AbstractWireSerializingTestCase<StartTransformTaskAction.Request> {
@Override
protected StartTransformTaskAction.Request createTestInstance() {
// Random 4-char transform id plus a random boolean flag — presumably a
// "force" option; NOTE(review): confirm against the Request constructor.
return new StartTransformTaskAction.Request(randomAlphaOfLength(4), randomBoolean());
}
@Override
protected Writeable.Reader<StartTransformTaskAction.Request> instanceReader() {
// Stream constructor used by the framework to deserialize the instance.
return StartTransformTaskAction.Request::new;
}
}

View File

@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
/**
 * Wire-serialization round-trip tests for {@code StartTransformTaskAction.Response}
 * (renamed from the "data frame" variant): the base class serializes
 * {@link #createTestInstance()} and reads it back via {@link #instanceReader()},
 * asserting equality of the round-tripped instance.
 */
public class StartTransformTaskActionResponseTests extends
AbstractWireSerializingTestCase<StartTransformTaskAction.Response> {
@Override
protected StartTransformTaskAction.Response createTestInstance() {
// Response carries a single boolean — presumably an "acknowledged"/"started"
// flag; NOTE(review): confirm against the Response constructor.
return new StartTransformTaskAction.Response(randomBoolean());
}
@Override
protected Writeable.Reader<StartTransformTaskAction.Response> instanceReader() {
// Stream constructor used by the framework to deserialize the instance.
return StartTransformTaskAction.Response::new;
}
}

View File

@ -11,14 +11,14 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Request;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
public class StopDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
public class StopTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> {
@Override
protected Request createTestInstance() {
@ -52,7 +52,7 @@ public class StopDataFrameTransformActionRequestTests extends AbstractWireSerial
String dataFrameId = "dataframe-id";
Task dataFrameTask = new Task(1L, "persistent", "action",
DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + dataFrameId,
TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + dataFrameId,
TaskId.EMPTY_TASK_ID, Collections.emptyMap());
Request request = new Request("unrelated", false, false, null, false);

View File

@ -7,9 +7,9 @@
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformAction.Response;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Response;
public class StartDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase<Response> {
public class StopTransformActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> {
@Override
protected Response createTestInstance() {

View File

@ -7,11 +7,11 @@
package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xpack.core.transform.action.UpdateDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Request;
import static org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate;
import static org.elasticsearch.xpack.core.transform.transforms.TransformConfigUpdateTests.randomDataFrameTransformConfigUpdate;
public class UpdateDataFrameTransformActionRequestTests extends AbstractWireSerializingDataFrameTestCase<Request> {
public class UpdateTransformActionRequestTests extends AbstractWireSerializingTransformTestCase<Request> {
@Override
protected Writeable.Reader<Request> instanceReader() {

View File

@ -8,17 +8,17 @@ package org.elasticsearch.xpack.core.transform.action;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.action.UpdateDataFrameTransformAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigTests;
import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Response;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests;
import java.io.IOException;
public class UpdateDataFrameTransformsActionResponseTests extends AbstractSerializingDataFrameTestCase<Response> {
public class UpdateTransformsActionResponseTests extends AbstractSerializingTransformTestCase<Response> {
@Override
protected Response createTestInstance() {
return new Response(DataFrameTransformConfigTests.randomDataFrameTransformConfigWithoutHeaders());
return new Response(TransformConfigTests.randomDataFrameTransformConfigWithoutHeaders());
}
@Override
@ -28,6 +28,6 @@ public class UpdateDataFrameTransformsActionResponseTests extends AbstractSerial
@Override
protected Response doParseInstance(XContentParser parser) throws IOException {
return new Response(DataFrameTransformConfig.fromXContent(parser, null, false));
return new Response(TransformConfig.fromXContent(parser, null, false));
}
}

View File

@ -13,16 +13,16 @@ import java.util.Date;
import static org.hamcrest.Matchers.nullValue;
public class DataFrameAuditMessageTests extends AbstractXContentTestCase<DataFrameAuditMessage> {
public class TransformAuditMessageTests extends AbstractXContentTestCase<TransformAuditMessage> {
public void testGetJobType() {
DataFrameAuditMessage message = createTestInstance();
TransformAuditMessage message = createTestInstance();
assertThat(message.getJobType(), nullValue());
}
@Override
protected DataFrameAuditMessage doParseInstance(XContentParser parser) {
return DataFrameAuditMessage.PARSER.apply(parser, null);
protected TransformAuditMessage doParseInstance(XContentParser parser) {
return TransformAuditMessage.PARSER.apply(parser, null);
}
@Override
@ -31,8 +31,8 @@ public class DataFrameAuditMessageTests extends AbstractXContentTestCase<DataFra
}
@Override
protected DataFrameAuditMessage createTestInstance() {
return new DataFrameAuditMessage(
protected TransformAuditMessage createTestInstance() {
return new TransformAuditMessage(
randomBoolean() ? null : randomAlphaOfLength(10),
randomAlphaOfLengthBetween(1, 20),
randomFrom(Level.values()),

View File

@ -18,8 +18,8 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BaseAggregationBuilder;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.DataFrameNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider;
import org.junit.Before;
import java.util.Collections;
@ -27,11 +27,11 @@ import java.util.List;
import static java.util.Collections.emptyList;
public abstract class AbstractSerializingDataFrameTestCase<T extends ToXContent & Writeable>
public abstract class AbstractSerializingTransformTestCase<T extends ToXContent & Writeable>
extends AbstractSerializingTestCase<T> {
protected static Params TO_XCONTENT_PARAMS = new ToXContent.MapParams(
Collections.singletonMap(DataFrameField.FOR_INTERNAL_STORAGE, "true"));
Collections.singletonMap(TransformField.FOR_INTERNAL_STORAGE, "true"));
/**
* Test case that ensures aggregation named objects are registered
@ -49,7 +49,7 @@ public abstract class AbstractSerializingDataFrameTestCase<T extends ToXContent
MockDeprecatedQueryBuilder::new));
namedWriteables.add(new NamedWriteableRegistry.Entry(AggregationBuilder.class, MockDeprecatedAggregationBuilder.NAME,
MockDeprecatedAggregationBuilder::new));
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(),
namedWriteables.add(new NamedWriteableRegistry.Entry(SyncConfig.class, TransformField.TIME_BASED_SYNC.getPreferredName(),
TimeSyncConfig::new));
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
@ -57,7 +57,7 @@ public abstract class AbstractSerializingDataFrameTestCase<T extends ToXContent
new ParseField(MockDeprecatedQueryBuilder.NAME), (p, c) -> MockDeprecatedQueryBuilder.fromXContent(p)));
namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class,
new ParseField(MockDeprecatedAggregationBuilder.NAME), (p, c) -> MockDeprecatedAggregationBuilder.fromXContent(p)));
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
namedXContentRegistry = new NamedXContentRegistry(namedXContents);

View File

@ -1,37 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.transform.transforms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
public class DataFrameTransformCheckpointStatsTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformCheckpointStats>
{
public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000),
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
}
@Override
protected DataFrameTransformCheckpointStats doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformCheckpointStats.fromXContent(parser);
}
@Override
protected DataFrameTransformCheckpointStats createTestInstance() {
return randomDataFrameTransformCheckpointStats();
}
@Override
protected Reader<DataFrameTransformCheckpointStats> instanceReader() {
return DataFrameTransformCheckpointStats::new;
}
}

View File

@ -12,7 +12,7 @@ import org.junit.Before;
import java.io.IOException;
public class DestConfigTests extends AbstractSerializingDataFrameTestCase<DestConfig> {
public class DestConfigTests extends AbstractSerializingTransformTestCase<DestConfig> {
private boolean lenient;

View File

@ -25,7 +25,7 @@ import org.junit.Before;
import java.io.IOException;
import java.util.LinkedHashMap;
public class QueryConfigTests extends AbstractSerializingDataFrameTestCase<QueryConfig> {
public class QueryConfigTests extends AbstractSerializingTransformTestCase<QueryConfig> {
private boolean lenient;

View File

@ -13,7 +13,7 @@ import org.junit.Before;
import java.io.IOException;
import java.util.function.Predicate;
public class SourceConfigTests extends AbstractSerializingDataFrameTestCase<SourceConfig> {
public class SourceConfigTests extends AbstractSerializingTransformTestCase<SourceConfig> {
private boolean lenient;

View File

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.transform.transforms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
public class TransformCheckpointStatsTests extends AbstractSerializingTransformTestCase<TransformCheckpointStats>
{
public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() {
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
TransformIndexerPositionTests.randomDataFrameIndexerPosition(),
randomBoolean() ? null : TransformProgressTests.randomDataFrameTransformProgress(),
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
}
@Override
protected TransformCheckpointStats doParseInstance(XContentParser parser) throws IOException {
return TransformCheckpointStats.fromXContent(parser);
}
@Override
protected TransformCheckpointStats createTestInstance() {
return randomDataFrameTransformCheckpointStats();
}
@Override
protected Reader<TransformCheckpointStats> instanceReader() {
return TransformCheckpointStats::new;
}
}

View File

@ -20,30 +20,30 @@ import java.util.TreeMap;
import static org.elasticsearch.test.TestMatchers.matchesPattern;
public class DataFrameTransformCheckpointTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformCheckpoint> {
public class TransformCheckpointTests extends AbstractSerializingTransformTestCase<TransformCheckpoint> {
public static DataFrameTransformCheckpoint randomDataFrameTransformCheckpoints() {
return new DataFrameTransformCheckpoint(randomAlphaOfLengthBetween(1, 10), randomNonNegativeLong(), randomNonNegativeLong(),
public static TransformCheckpoint randomDataFrameTransformCheckpoints() {
return new TransformCheckpoint(randomAlphaOfLengthBetween(1, 10), randomNonNegativeLong(), randomNonNegativeLong(),
randomCheckpointsByIndex(), randomNonNegativeLong());
}
@Override
protected DataFrameTransformCheckpoint doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformCheckpoint.fromXContent(parser, false);
protected TransformCheckpoint doParseInstance(XContentParser parser) throws IOException {
return TransformCheckpoint.fromXContent(parser, false);
}
@Override
protected DataFrameTransformCheckpoint createTestInstance() {
protected TransformCheckpoint createTestInstance() {
return randomDataFrameTransformCheckpoints();
}
@Override
protected Reader<DataFrameTransformCheckpoint> instanceReader() {
return DataFrameTransformCheckpoint::new;
protected Reader<TransformCheckpoint> instanceReader() {
return TransformCheckpoint::new;
}
public void testXContentForInternalStorage() throws IOException {
DataFrameTransformCheckpoint dataFrameTransformCheckpoints = randomDataFrameTransformCheckpoints();
TransformCheckpoint dataFrameTransformCheckpoints = randomDataFrameTransformCheckpoints();
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
XContentBuilder content = dataFrameTransformCheckpoints.toXContent(xContentBuilder, getToXContentParams());
@ -62,12 +62,12 @@ public class DataFrameTransformCheckpointTests extends AbstractSerializingDataFr
otherCheckpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), new long[] { 1, 2, 3 });
long timeUpperBound = randomNonNegativeLong();
DataFrameTransformCheckpoint dataFrameTransformCheckpoints = new DataFrameTransformCheckpoint(id, timestamp, checkpoint,
TransformCheckpoint dataFrameTransformCheckpoints = new TransformCheckpoint(id, timestamp, checkpoint,
checkpointsByIndex, timeUpperBound);
// same
assertTrue(dataFrameTransformCheckpoints.matches(dataFrameTransformCheckpoints));
DataFrameTransformCheckpoint dataFrameTransformCheckpointsCopy = copyInstance(dataFrameTransformCheckpoints);
TransformCheckpoint dataFrameTransformCheckpointsCopy = copyInstance(dataFrameTransformCheckpoints);
// with copy
assertTrue(dataFrameTransformCheckpoints.matches(dataFrameTransformCheckpointsCopy));
@ -75,19 +75,19 @@ public class DataFrameTransformCheckpointTests extends AbstractSerializingDataFr
// other id
assertFalse(dataFrameTransformCheckpoints
.matches(new DataFrameTransformCheckpoint(id + "-1", timestamp, checkpoint, checkpointsByIndex, timeUpperBound)));
.matches(new TransformCheckpoint(id + "-1", timestamp, checkpoint, checkpointsByIndex, timeUpperBound)));
// other timestamp
assertTrue(dataFrameTransformCheckpoints
.matches(new DataFrameTransformCheckpoint(id, (timestamp / 2) + 1, checkpoint, checkpointsByIndex, timeUpperBound)));
.matches(new TransformCheckpoint(id, (timestamp / 2) + 1, checkpoint, checkpointsByIndex, timeUpperBound)));
// other checkpoint
assertTrue(dataFrameTransformCheckpoints
.matches(new DataFrameTransformCheckpoint(id, timestamp, (checkpoint / 2) + 1, checkpointsByIndex, timeUpperBound)));
.matches(new TransformCheckpoint(id, timestamp, (checkpoint / 2) + 1, checkpointsByIndex, timeUpperBound)));
// other index checkpoints
assertFalse(dataFrameTransformCheckpoints
.matches(new DataFrameTransformCheckpoint(id, timestamp, checkpoint, otherCheckpointsByIndex, timeUpperBound)));
.matches(new TransformCheckpoint(id, timestamp, checkpoint, otherCheckpointsByIndex, timeUpperBound)));
// other time upper bound
assertTrue(dataFrameTransformCheckpoints
.matches(new DataFrameTransformCheckpoint(id, timestamp, checkpoint, checkpointsByIndex, (timeUpperBound / 2) + 1)));
.matches(new TransformCheckpoint(id, timestamp, checkpoint, checkpointsByIndex, (timeUpperBound / 2) + 1)));
}
public void testGetBehind() {
@ -119,53 +119,53 @@ public class DataFrameTransformCheckpointTests extends AbstractSerializingDataFr
long checkpoint = randomLongBetween(10, 100);
DataFrameTransformCheckpoint checkpointOld = new DataFrameTransformCheckpoint(
TransformCheckpoint checkpointOld = new TransformCheckpoint(
id, timestamp, checkpoint, checkpointsByIndexOld, 0L);
DataFrameTransformCheckpoint checkpointTransientNew = new DataFrameTransformCheckpoint(
TransformCheckpoint checkpointTransientNew = new TransformCheckpoint(
id, timestamp, -1L, checkpointsByIndexNew, 0L);
DataFrameTransformCheckpoint checkpointNew = new DataFrameTransformCheckpoint(
TransformCheckpoint checkpointNew = new TransformCheckpoint(
id, timestamp, checkpoint + 1, checkpointsByIndexNew, 0L);
DataFrameTransformCheckpoint checkpointOlderButNewerShardsCheckpoint = new DataFrameTransformCheckpoint(
TransformCheckpoint checkpointOlderButNewerShardsCheckpoint = new TransformCheckpoint(
id, timestamp, checkpoint - 1, checkpointsByIndexNew, 0L);
assertEquals(indices * shards * 10L, DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
assertEquals(indices * shards * 10L, DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointNew));
assertEquals(indices * shards * 10L, TransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
assertEquals(indices * shards * 10L, TransformCheckpoint.getBehind(checkpointOld, checkpointNew));
// no difference for same checkpoints, transient or not
assertEquals(0L, DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointOld));
assertEquals(0L, DataFrameTransformCheckpoint.getBehind(checkpointTransientNew, checkpointTransientNew));
assertEquals(0L, DataFrameTransformCheckpoint.getBehind(checkpointNew, checkpointNew));
assertEquals(0L, TransformCheckpoint.getBehind(checkpointOld, checkpointOld));
assertEquals(0L, TransformCheckpoint.getBehind(checkpointTransientNew, checkpointTransientNew));
assertEquals(0L, TransformCheckpoint.getBehind(checkpointNew, checkpointNew));
// new vs transient new: ok
assertEquals(0L, DataFrameTransformCheckpoint.getBehind(checkpointNew, checkpointTransientNew));
assertEquals(0L, TransformCheckpoint.getBehind(checkpointNew, checkpointTransientNew));
// transient new vs new: illegal
Exception e = expectThrows(IllegalArgumentException.class,
() -> DataFrameTransformCheckpoint.getBehind(checkpointTransientNew, checkpointNew));
() -> TransformCheckpoint.getBehind(checkpointTransientNew, checkpointNew));
assertEquals("can not compare transient against a non transient checkpoint", e.getMessage());
// new vs old: illegal
e = expectThrows(IllegalArgumentException.class, () -> DataFrameTransformCheckpoint.getBehind(checkpointNew, checkpointOld));
e = expectThrows(IllegalArgumentException.class, () -> TransformCheckpoint.getBehind(checkpointNew, checkpointOld));
assertEquals("old checkpoint is newer than new checkpoint", e.getMessage());
// corner case: the checkpoint appears older but the inner shard checkpoints are newer
assertEquals(-1L, DataFrameTransformCheckpoint.getBehind(checkpointOlderButNewerShardsCheckpoint, checkpointOld));
assertEquals(-1L, TransformCheckpoint.getBehind(checkpointOlderButNewerShardsCheckpoint, checkpointOld));
// test cases where indices sets do not match
// remove something from old, so newer has 1 index more than old: should be equivalent to old index existing but empty
checkpointsByIndexOld.remove(checkpointsByIndexOld.firstKey());
long behind = DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew);
long behind = TransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew);
assertTrue("Expected behind (" + behind + ") => sum of shard checkpoint differences (" + indices * shards * 10L + ")",
behind >= indices * shards * 10L);
// remove same key: old and new should have equal indices again
checkpointsByIndexNew.remove(checkpointsByIndexNew.firstKey());
assertEquals((indices - 1) * shards * 10L, DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
assertEquals((indices - 1) * shards * 10L, TransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
// remove 1st index from new, now old has 1 index more, which should be ignored
checkpointsByIndexNew.remove(checkpointsByIndexNew.firstKey());
assertEquals((indices - 2) * shards * 10L, DataFrameTransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
assertEquals((indices - 2) * shards * 10L, TransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew));
}
private static Map<String, long[]> randomCheckpointsByIndex() {

View File

@ -15,35 +15,35 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.time.Instant;
public class DataFrameTransformCheckpointingInfoTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformCheckpointingInfo> {
public class TransformCheckpointingInfoTests extends AbstractSerializingTransformTestCase<TransformCheckpointingInfo> {
public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new DataFrameTransformCheckpointingInfo(
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
return new TransformCheckpointingInfo(
TransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
TransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
randomNonNegativeLong(),
randomBoolean() ? null : Instant.ofEpochMilli(randomLongBetween(1, 100000)));
}
@Override
protected DataFrameTransformCheckpointingInfo doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformCheckpointingInfo.fromXContent(parser);
protected TransformCheckpointingInfo doParseInstance(XContentParser parser) throws IOException {
return TransformCheckpointingInfo.fromXContent(parser);
}
@Override
protected DataFrameTransformCheckpointingInfo createTestInstance() {
protected TransformCheckpointingInfo createTestInstance() {
return randomDataFrameTransformCheckpointingInfo();
}
@Override
protected Reader<DataFrameTransformCheckpointingInfo> instanceReader() {
return DataFrameTransformCheckpointingInfo::new;
protected Reader<TransformCheckpointingInfo> instanceReader() {
return TransformCheckpointingInfo::new;
}
public void testBackwardsSerialization() throws IOException {
DataFrameTransformCheckpointingInfo checkpointingInfo = new DataFrameTransformCheckpointingInfo(
DataFrameTransformCheckpointStats.EMPTY,
DataFrameTransformCheckpointStats.EMPTY,
TransformCheckpointingInfo checkpointingInfo = new TransformCheckpointingInfo(
TransformCheckpointStats.EMPTY,
TransformCheckpointStats.EMPTY,
randomNonNegativeLong(),
// changesLastDetectedAt is not serialized to past values, so when it is pulled back in, it will be null
null);
@ -52,7 +52,7 @@ public class DataFrameTransformCheckpointingInfoTests extends AbstractSerializin
checkpointingInfo.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
in.setVersion(Version.V_7_4_0);
DataFrameTransformCheckpointingInfo streamedCheckpointingInfo = new DataFrameTransformCheckpointingInfo(in);
TransformCheckpointingInfo streamedCheckpointingInfo = new TransformCheckpointingInfo(in);
assertEquals(checkpointingInfo, streamedCheckpointingInfo);
}
}

View File

@ -30,21 +30,21 @@ import static org.elasticsearch.xpack.core.transform.transforms.SourceConfigTest
import static org.elasticsearch.xpack.core.transform.transforms.SourceConfigTests.randomSourceConfig;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformConfig> {
public class TransformConfigTests extends AbstractSerializingTransformTestCase<TransformConfig> {
private String transformId;
private boolean runWithHeaders;
public static DataFrameTransformConfig randomDataFrameTransformConfigWithoutHeaders() {
public static TransformConfig randomDataFrameTransformConfigWithoutHeaders() {
return randomDataFrameTransformConfigWithoutHeaders(randomAlphaOfLengthBetween(1, 10));
}
public static DataFrameTransformConfig randomDataFrameTransformConfig() {
public static TransformConfig randomDataFrameTransformConfig() {
return randomDataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10));
}
public static DataFrameTransformConfig randomDataFrameTransformConfigWithoutHeaders(String id) {
return new DataFrameTransformConfig(id,
public static TransformConfig randomDataFrameTransformConfigWithoutHeaders(String id) {
return new TransformConfig(id,
randomSourceConfig(),
randomDestConfig(),
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -56,8 +56,8 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
null);
}
public static DataFrameTransformConfig randomDataFrameTransformConfig(String id) {
return new DataFrameTransformConfig(id,
public static TransformConfig randomDataFrameTransformConfig(String id) {
return new TransformConfig(id,
randomSourceConfig(),
randomDestConfig(),
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -69,13 +69,13 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
randomBoolean() ? null : Version.CURRENT.toString());
}
public static DataFrameTransformConfig randomInvalidDataFrameTransformConfig() {
public static TransformConfig randomInvalidDataFrameTransformConfig() {
if (randomBoolean()) {
return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomInvalidSourceConfig(), randomDestConfig(),
return new TransformConfig(randomAlphaOfLengthBetween(1, 10), randomInvalidSourceConfig(), randomDestConfig(),
null, randomBoolean() ? randomSyncConfig() : null, randomHeaders(), PivotConfigTests.randomPivotConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000));
} // else
return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(), randomDestConfig(),
return new TransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(), randomDestConfig(),
null, randomBoolean() ? randomSyncConfig() : null, randomHeaders(), PivotConfigTests.randomInvalidPivotConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000));
}
@ -91,22 +91,22 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
}
@Override
protected DataFrameTransformConfig doParseInstance(XContentParser parser) throws IOException {
protected TransformConfig doParseInstance(XContentParser parser) throws IOException {
if (randomBoolean()) {
return DataFrameTransformConfig.fromXContent(parser, transformId, runWithHeaders);
return TransformConfig.fromXContent(parser, transformId, runWithHeaders);
} else {
return DataFrameTransformConfig.fromXContent(parser, null, runWithHeaders);
return TransformConfig.fromXContent(parser, null, runWithHeaders);
}
}
@Override
protected DataFrameTransformConfig createTestInstance() {
protected TransformConfig createTestInstance() {
return runWithHeaders ? randomDataFrameTransformConfig(transformId) : randomDataFrameTransformConfigWithoutHeaders(transformId);
}
@Override
protected Reader<DataFrameTransformConfig> instanceReader() {
return DataFrameTransformConfig::new;
protected Reader<TransformConfig> instanceReader() {
return TransformConfig::new;
}
@Override
@ -137,7 +137,7 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
+ " \"field\": \"points\""
+ "} } } } }";
DataFrameTransformConfig dataFrameTransformConfig = createDataFrameTransformConfigFromString(pivotTransform, "test_match_all");
TransformConfig dataFrameTransformConfig = createDataFrameTransformConfigFromString(pivotTransform, "test_match_all");
assertNotNull(dataFrameTransformConfig.getSource().getQueryConfig());
assertTrue(dataFrameTransformConfig.getSource().getQueryConfig().isValid());
@ -213,7 +213,7 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
}
public void testXContentForInternalStorage() throws IOException {
DataFrameTransformConfig dataFrameTransformConfig = randomDataFrameTransformConfig();
TransformConfig dataFrameTransformConfig = randomDataFrameTransformConfig();
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
XContentBuilder content = dataFrameTransformConfig.toXContent(xContentBuilder, getToXContentParams());
@ -231,11 +231,11 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
}
public void testMaxLengthDescription() {
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> new DataFrameTransformConfig("id",
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> new TransformConfig("id",
randomSourceConfig(), randomDestConfig(), null, null, null, PivotConfigTests.randomPivotConfig(), randomAlphaOfLength(1001)));
assertThat(exception.getMessage(), equalTo("[description] must be less than 1000 characters in length."));
String description = randomAlphaOfLength(1000);
DataFrameTransformConfig config = new DataFrameTransformConfig("id",
TransformConfig config = new TransformConfig("id",
randomSourceConfig(), randomDestConfig(), null, null, null, PivotConfigTests.randomPivotConfig(), description);
assertThat(description, equalTo(config.getDescription()));
}
@ -257,7 +257,7 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
+ " \"field\": \"points\""
+ "} } } } }";
DataFrameTransformConfig dataFrameTransformConfig = createDataFrameTransformConfigFromString(pivotTransform, "body_id");
TransformConfig dataFrameTransformConfig = createDataFrameTransformConfigFromString(pivotTransform, "body_id");
assertEquals("body_id", dataFrameTransformConfig.getId());
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
@ -268,9 +268,9 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
}
private DataFrameTransformConfig createDataFrameTransformConfigFromString(String json, String id) throws IOException {
private TransformConfig createDataFrameTransformConfigFromString(String json, String id) throws IOException {
final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(),
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json);
return DataFrameTransformConfig.fromXContent(parser, id, false);
return TransformConfig.fromXContent(parser, id, false);
}
}

View File

@ -21,15 +21,15 @@ import java.time.Instant;
import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfigTests.randomDataFrameTransformConfig;
import static org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests.randomDataFrameTransformConfig;
import static org.elasticsearch.xpack.core.transform.transforms.DestConfigTests.randomDestConfig;
import static org.elasticsearch.xpack.core.transform.transforms.SourceConfigTests.randomSourceConfig;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformConfigUpdateTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformConfigUpdate> {
public class TransformConfigUpdateTests extends AbstractSerializingTransformTestCase<TransformConfigUpdate> {
public static DataFrameTransformConfigUpdate randomDataFrameTransformConfigUpdate() {
return new DataFrameTransformConfigUpdate(
public static TransformConfigUpdate randomDataFrameTransformConfigUpdate() {
return new TransformConfigUpdate(
randomBoolean() ? null : randomSourceConfig(),
randomBoolean() ? null : randomDestConfig(),
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -42,33 +42,33 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
}
@Override
protected DataFrameTransformConfigUpdate doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformConfigUpdate.fromXContent(parser);
protected TransformConfigUpdate doParseInstance(XContentParser parser) throws IOException {
return TransformConfigUpdate.fromXContent(parser);
}
@Override
protected DataFrameTransformConfigUpdate createTestInstance() {
protected TransformConfigUpdate createTestInstance() {
return randomDataFrameTransformConfigUpdate();
}
@Override
protected Reader<DataFrameTransformConfigUpdate> instanceReader() {
return DataFrameTransformConfigUpdate::new;
protected Reader<TransformConfigUpdate> instanceReader() {
return TransformConfigUpdate::new;
}
public void testIsNoop() {
for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
DataFrameTransformConfig config = randomDataFrameTransformConfig();
DataFrameTransformConfigUpdate update = new DataFrameTransformConfigUpdate(null, null, null, null, null);
TransformConfig config = randomDataFrameTransformConfig();
TransformConfigUpdate update = new TransformConfigUpdate(null, null, null, null, null);
assertTrue("null update is not noop", update.isNoop(config));
update = new DataFrameTransformConfigUpdate(config.getSource(),
update = new TransformConfigUpdate(config.getSource(),
config.getDestination(),
config.getFrequency(),
config.getSyncConfig(),
config.getDescription());
assertTrue("equal update is not noop", update.isNoop(config));
update = new DataFrameTransformConfigUpdate(config.getSource(),
update = new TransformConfigUpdate(config.getSource(),
config.getDestination(),
config.getFrequency(),
config.getSyncConfig(),
@ -78,7 +78,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
}
public void testApply() {
DataFrameTransformConfig config = new DataFrameTransformConfig("time-transform",
TransformConfig config = new TransformConfig("time-transform",
randomSourceConfig(),
randomDestConfig(),
TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -88,7 +88,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000),
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.V_7_2_0.toString());
DataFrameTransformConfigUpdate update = new DataFrameTransformConfigUpdate(null, null, null, null, null);
TransformConfigUpdate update = new TransformConfigUpdate(null, null, null, null, null);
assertThat(config, equalTo(update.apply(config)));
SourceConfig sourceConfig = new SourceConfig("the_new_index");
@ -96,11 +96,11 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
TimeValue frequency = TimeValue.timeValueSeconds(10);
SyncConfig syncConfig = new TimeSyncConfig("time_field", TimeValue.timeValueSeconds(30));
String newDescription = "new description";
update = new DataFrameTransformConfigUpdate(sourceConfig, destConfig, frequency, syncConfig, newDescription);
update = new TransformConfigUpdate(sourceConfig, destConfig, frequency, syncConfig, newDescription);
Map<String, String> headers = Collections.singletonMap("foo", "bar");
update.setHeaders(headers);
DataFrameTransformConfig updatedConfig = update.apply(config);
TransformConfig updatedConfig = update.apply(config);
assertThat(updatedConfig.getSource(), equalTo(sourceConfig));
assertThat(updatedConfig.getDestination(), equalTo(destConfig));
@ -112,7 +112,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
}
public void testApplyWithSyncChange() {
DataFrameTransformConfig batchConfig = new DataFrameTransformConfig("batch-transform",
TransformConfig batchConfig = new TransformConfig("batch-transform",
randomSourceConfig(),
randomDestConfig(),
TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -123,7 +123,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.CURRENT.toString());
DataFrameTransformConfigUpdate update = new DataFrameTransformConfigUpdate(null,
TransformConfigUpdate update = new TransformConfigUpdate(null,
null,
null,
TimeSyncConfigTests.randomTimeSyncConfig(),
@ -133,7 +133,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
assertThat(ex.getMessage(),
equalTo("Cannot change the current sync configuration of transform [batch-transform] from [null] to [time]"));
DataFrameTransformConfig timeSyncedConfig = new DataFrameTransformConfig("time-transform",
TransformConfig timeSyncedConfig = new TransformConfig("time-transform",
randomSourceConfig(),
randomDestConfig(),
TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
@ -144,7 +144,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.CURRENT.toString());
DataFrameTransformConfigUpdate fooSyncUpdate = new DataFrameTransformConfigUpdate(null,
TransformConfigUpdate fooSyncUpdate = new TransformConfigUpdate(null,
null,
null,
new FooSync(),
@ -163,12 +163,12 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData
}
@Override
public QueryBuilder getRangeQuery(DataFrameTransformCheckpoint newCheckpoint) {
public QueryBuilder getRangeQuery(TransformCheckpoint newCheckpoint) {
return null;
}
@Override
public QueryBuilder getRangeQuery(DataFrameTransformCheckpoint oldCheckpoint, DataFrameTransformCheckpoint newCheckpoint) {
public QueryBuilder getRangeQuery(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint) {
return null;
}

View File

@ -15,20 +15,20 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;
public class DataFrameIndexerPositionTests extends AbstractSerializingTestCase<DataFrameIndexerPosition> {
public class TransformIndexerPositionTests extends AbstractSerializingTestCase<TransformIndexerPosition> {
public static DataFrameIndexerPosition randomDataFrameIndexerPosition() {
return new DataFrameIndexerPosition(randomPosition(), randomPosition());
public static TransformIndexerPosition randomDataFrameIndexerPosition() {
return new TransformIndexerPosition(randomPosition(), randomPosition());
}
@Override
protected DataFrameIndexerPosition createTestInstance() {
protected TransformIndexerPosition createTestInstance() {
return randomDataFrameIndexerPosition();
}
@Override
protected Reader<DataFrameIndexerPosition> instanceReader() {
return DataFrameIndexerPosition::new;
protected Reader<TransformIndexerPosition> instanceReader() {
return TransformIndexerPosition::new;
}
@Override
@ -42,8 +42,8 @@ public class DataFrameIndexerPositionTests extends AbstractSerializingTestCase<D
}
@Override
protected DataFrameIndexerPosition doParseInstance(XContentParser parser) throws IOException {
return DataFrameIndexerPosition.fromXContent(parser);
protected TransformIndexerPosition doParseInstance(XContentParser parser) throws IOException {
return TransformIndexerPosition.fromXContent(parser);
}
private static Map<String, Object> randomPosition() {

View File

@ -13,25 +13,25 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameIndexerTransformStatsTests extends AbstractSerializingTestCase<DataFrameIndexerTransformStats> {
public class TransformIndexerStatsTests extends AbstractSerializingTestCase<TransformIndexerStats> {
@Override
protected DataFrameIndexerTransformStats createTestInstance() {
protected TransformIndexerStats createTestInstance() {
return randomStats();
}
@Override
protected Writeable.Reader<DataFrameIndexerTransformStats> instanceReader() {
return DataFrameIndexerTransformStats::new;
protected Writeable.Reader<TransformIndexerStats> instanceReader() {
return TransformIndexerStats::new;
}
@Override
protected DataFrameIndexerTransformStats doParseInstance(XContentParser parser) {
return DataFrameIndexerTransformStats.fromXContent(parser);
protected TransformIndexerStats doParseInstance(XContentParser parser) {
return TransformIndexerStats.fromXContent(parser);
}
public static DataFrameIndexerTransformStats randomStats() {
return new DataFrameIndexerTransformStats(randomLongBetween(10L, 10000L),
public static TransformIndexerStats randomStats() {
return new TransformIndexerStats(randomLongBetween(10L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
randomLongBetween(0L, 10000L),
@ -41,7 +41,7 @@ public class DataFrameIndexerTransformStatsTests extends AbstractSerializingTest
}
public void testExpAvgIncrement() {
DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats();
TransformIndexerStats stats = new TransformIndexerStats();
assertThat(stats.getExpAvgCheckpointDurationMs(), equalTo(0.0));
assertThat(stats.getExpAvgDocumentsIndexed(), equalTo(0.0));

View File

@ -19,59 +19,59 @@ import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class DataFrameTransformProgressTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformProgress> {
public class TransformProgressTests extends AbstractSerializingTransformTestCase<TransformProgress> {
public static DataFrameTransformProgress randomDataFrameTransformProgress() {
return new DataFrameTransformProgress(
public static TransformProgress randomDataFrameTransformProgress() {
return new TransformProgress(
randomBoolean() ? null : randomLongBetween(0, 10000),
randomBoolean() ? null : randomLongBetween(0, 10000),
randomBoolean() ? null : randomLongBetween(1, 10000));
}
@Override
protected DataFrameTransformProgress doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformProgress.PARSER.apply(parser, null);
protected TransformProgress doParseInstance(XContentParser parser) throws IOException {
return TransformProgress.PARSER.apply(parser, null);
}
@Override
protected DataFrameTransformProgress createTestInstance() {
protected TransformProgress createTestInstance() {
return randomDataFrameTransformProgress();
}
@Override
protected Reader<DataFrameTransformProgress> instanceReader() {
return DataFrameTransformProgress::new;
protected Reader<TransformProgress> instanceReader() {
return TransformProgress::new;
}
public void testPercentComplete() {
DataFrameTransformProgress progress = new DataFrameTransformProgress(0L, 100L, null);
TransformProgress progress = new TransformProgress(0L, 100L, null);
assertThat(progress.getPercentComplete(), equalTo(100.0));
progress = new DataFrameTransformProgress(100L, 0L, null);
progress = new TransformProgress(100L, 0L, null);
assertThat(progress.getPercentComplete(), equalTo(0.0));
progress = new DataFrameTransformProgress(100L, 10000L, null);
progress = new TransformProgress(100L, 10000L, null);
assertThat(progress.getPercentComplete(), equalTo(100.0));
progress = new DataFrameTransformProgress(100L, null, null);
progress = new TransformProgress(100L, null, null);
assertThat(progress.getPercentComplete(), equalTo(0.0));
progress = new DataFrameTransformProgress(100L, 50L, null);
progress = new TransformProgress(100L, 50L, null);
assertThat(progress.getPercentComplete(), closeTo(50.0, 0.000001));
progress = new DataFrameTransformProgress(null, 50L, 10L);
progress = new TransformProgress(null, 50L, 10L);
assertThat(progress.getPercentComplete(), is(nullValue()));
}
public void testConstructor() {
IllegalArgumentException ex =
expectThrows(IllegalArgumentException.class, () -> new DataFrameTransformProgress(-1L, null, null));
expectThrows(IllegalArgumentException.class, () -> new TransformProgress(-1L, null, null));
assertThat(ex.getMessage(), equalTo("[total_docs] must be >0."));
ex = expectThrows(IllegalArgumentException.class, () -> new DataFrameTransformProgress(1L, -1L, null));
ex = expectThrows(IllegalArgumentException.class, () -> new TransformProgress(1L, -1L, null));
assertThat(ex.getMessage(), equalTo("[docs_processed] must be >0."));
ex = expectThrows(IllegalArgumentException.class, () -> new DataFrameTransformProgress(1L, 1L, -1L));
ex = expectThrows(IllegalArgumentException.class, () -> new TransformProgress(1L, 1L, -1L));
assertThat(ex.getMessage(), equalTo("[docs_indexed] must be >0."));
}
@ -79,25 +79,25 @@ public class DataFrameTransformProgressTests extends AbstractSerializingDataFram
long totalDocs = 10_000;
long processedDocs = randomLongBetween(0, totalDocs);
// documentsIndexed are not in past versions, so it would be zero coming in
DataFrameTransformProgress progress = new DataFrameTransformProgress(totalDocs, processedDocs, 0L);
TransformProgress progress = new TransformProgress(totalDocs, processedDocs, 0L);
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_7_2_0);
progress.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
in.setVersion(Version.V_7_2_0);
DataFrameTransformProgress streamedProgress = new DataFrameTransformProgress(in);
TransformProgress streamedProgress = new TransformProgress(in);
assertEquals(progress, streamedProgress);
}
}
progress = new DataFrameTransformProgress(null, processedDocs, 0L);
progress = new TransformProgress(null, processedDocs, 0L);
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_7_2_0);
progress.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
in.setVersion(Version.V_7_2_0);
DataFrameTransformProgress streamedProgress = new DataFrameTransformProgress(in);
assertEquals(new DataFrameTransformProgress(0L, 0L, 0L), streamedProgress);
TransformProgress streamedProgress = new TransformProgress(in);
assertEquals(new TransformProgress(0L, 0L, 0L), streamedProgress);
}
}

View File

@ -14,15 +14,15 @@ import org.elasticsearch.xpack.core.indexing.IndexerState;
import java.io.IOException;
import java.util.function.Predicate;
import static org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformProgressTests.randomDataFrameTransformProgress;
import static org.elasticsearch.xpack.core.transform.transforms.TransformProgressTests.randomDataFrameTransformProgress;
import static org.elasticsearch.xpack.core.transform.transforms.NodeAttributeTests.randomNodeAttributes;
public class DataFrameTransformStateTests extends AbstractSerializingTestCase<DataFrameTransformState> {
public class TransformStateTests extends AbstractSerializingTestCase<TransformState> {
public static DataFrameTransformState randomDataFrameTransformState() {
return new DataFrameTransformState(randomFrom(DataFrameTransformTaskState.values()),
public static TransformState randomDataFrameTransformState() {
return new TransformState(randomFrom(TransformTaskState.values()),
randomFrom(IndexerState.values()),
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
TransformIndexerPositionTests.randomDataFrameIndexerPosition(),
randomLongBetween(0,10),
randomBoolean() ? null : randomAlphaOfLength(10),
randomBoolean() ? null : randomDataFrameTransformProgress(),
@ -30,18 +30,18 @@ public class DataFrameTransformStateTests extends AbstractSerializingTestCase<Da
}
@Override
protected DataFrameTransformState doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformState.fromXContent(parser);
protected TransformState doParseInstance(XContentParser parser) throws IOException {
return TransformState.fromXContent(parser);
}
@Override
protected DataFrameTransformState createTestInstance() {
protected TransformState createTestInstance() {
return randomDataFrameTransformState();
}
@Override
protected Reader<DataFrameTransformState> instanceReader() {
return DataFrameTransformState::new;
protected Reader<TransformState> instanceReader() {
return TransformState::new;
}
@Override

View File

@ -13,30 +13,30 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class DataFrameTransformStatsTests extends AbstractSerializingTestCase<DataFrameTransformStats> {
public class TransformStatsTests extends AbstractSerializingTestCase<TransformStats> {
public static DataFrameTransformStats randomDataFrameTransformStats() {
return new DataFrameTransformStats(randomAlphaOfLength(10),
randomFrom(DataFrameTransformStats.State.values()),
public static TransformStats randomDataFrameTransformStats() {
return new TransformStats(randomAlphaOfLength(10),
randomFrom(TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : NodeAttributeTests.randomNodeAttributes(),
DataFrameIndexerTransformStatsTests.randomStats(),
DataFrameTransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo());
TransformIndexerStatsTests.randomStats(),
TransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo());
}
@Override
protected DataFrameTransformStats doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformStats.fromXContent(parser);
protected TransformStats doParseInstance(XContentParser parser) throws IOException {
return TransformStats.fromXContent(parser);
}
@Override
protected DataFrameTransformStats createTestInstance() {
protected TransformStats createTestInstance() {
return randomDataFrameTransformStats();
}
@Override
protected Reader<DataFrameTransformStats> instanceReader() {
return DataFrameTransformStats::new;
protected Reader<TransformStats> instanceReader() {
return TransformStats::new;
}
@Override

View File

@ -9,29 +9,29 @@ package org.elasticsearch.xpack.core.transform.transforms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.io.IOException;
import java.util.Collections;
public class DataFrameTransformStoredDocTests extends AbstractSerializingDataFrameTestCase<DataFrameTransformStoredDoc> {
public class TransformStoredDocTests extends AbstractSerializingTransformTestCase<TransformStoredDoc> {
protected static ToXContent.Params TO_XCONTENT_PARAMS = new ToXContent.MapParams(
Collections.singletonMap(DataFrameField.FOR_INTERNAL_STORAGE, "true"));
Collections.singletonMap(TransformField.FOR_INTERNAL_STORAGE, "true"));
public static DataFrameTransformStoredDoc randomDataFrameTransformStoredDoc(String id) {
return new DataFrameTransformStoredDoc(id,
DataFrameTransformStateTests.randomDataFrameTransformState(),
DataFrameIndexerTransformStatsTests.randomStats());
public static TransformStoredDoc randomDataFrameTransformStoredDoc(String id) {
return new TransformStoredDoc(id,
TransformStateTests.randomDataFrameTransformState(),
TransformIndexerStatsTests.randomStats());
}
public static DataFrameTransformStoredDoc randomDataFrameTransformStoredDoc() {
public static TransformStoredDoc randomDataFrameTransformStoredDoc() {
return randomDataFrameTransformStoredDoc(randomAlphaOfLengthBetween(1, 10));
}
@Override
protected DataFrameTransformStoredDoc doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransformStoredDoc.PARSER.apply(parser, null);
protected TransformStoredDoc doParseInstance(XContentParser parser) throws IOException {
return TransformStoredDoc.PARSER.apply(parser, null);
}
@Override
@ -42,12 +42,12 @@ public class DataFrameTransformStoredDocTests extends AbstractSerializingDataFra
}
@Override
protected DataFrameTransformStoredDoc createTestInstance() {
protected TransformStoredDoc createTestInstance() {
return randomDataFrameTransformStoredDoc();
}
@Override
protected Reader<DataFrameTransformStoredDoc> instanceReader() {
return DataFrameTransformStoredDoc::new;
protected Reader<TransformStoredDoc> instanceReader() {
return TransformStoredDoc::new;
}
}

View File

@ -14,31 +14,31 @@ import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformTaskStateTests extends ESTestCase {
public class TransformTaskStateTests extends ESTestCase {
public void testValidOrdinals() {
assertThat(DataFrameTransformTaskState.STOPPED.ordinal(), equalTo(0));
assertThat(DataFrameTransformTaskState.STARTED.ordinal(), equalTo(1));
assertThat(DataFrameTransformTaskState.FAILED.ordinal(), equalTo(2));
assertThat(TransformTaskState.STOPPED.ordinal(), equalTo(0));
assertThat(TransformTaskState.STARTED.ordinal(), equalTo(1));
assertThat(TransformTaskState.FAILED.ordinal(), equalTo(2));
}
public void testwriteTo() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
DataFrameTransformTaskState.STOPPED.writeTo(out);
TransformTaskState.STOPPED.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(0));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
DataFrameTransformTaskState.STARTED.writeTo(out);
TransformTaskState.STARTED.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(1));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
DataFrameTransformTaskState.FAILED.writeTo(out);
TransformTaskState.FAILED.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(2));
}
@ -49,19 +49,19 @@ public class DataFrameTransformTaskStateTests extends ESTestCase {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(0);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(DataFrameTransformTaskState.fromStream(in), equalTo(DataFrameTransformTaskState.STOPPED));
assertThat(TransformTaskState.fromStream(in), equalTo(TransformTaskState.STOPPED));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(1);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(DataFrameTransformTaskState.fromStream(in), equalTo(DataFrameTransformTaskState.STARTED));
assertThat(TransformTaskState.fromStream(in), equalTo(TransformTaskState.STARTED));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(2);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(DataFrameTransformTaskState.fromStream(in), equalTo(DataFrameTransformTaskState.FAILED));
assertThat(TransformTaskState.fromStream(in), equalTo(TransformTaskState.FAILED));
}
}
}
@ -70,10 +70,10 @@ public class DataFrameTransformTaskStateTests extends ESTestCase {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE));
try (StreamInput in = out.bytes().streamInput()) {
DataFrameTransformTaskState.fromStream(in);
TransformTaskState.fromStream(in);
fail("Expected IOException");
} catch(IOException e) {
assertThat(e.getMessage(), containsString("Unknown DataFrameTransformTaskState ordinal ["));
assertThat(e.getMessage(), containsString("Unknown TransformTaskState ordinal ["));
}
}

View File

@ -17,34 +17,34 @@ import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public class DataFrameTransformTests extends AbstractSerializingDataFrameTestCase<DataFrameTransform> {
public class TransformTests extends AbstractSerializingTransformTestCase<Transform> {
@Override
protected DataFrameTransform doParseInstance(XContentParser parser) throws IOException {
return DataFrameTransform.PARSER.apply(parser, null);
protected Transform doParseInstance(XContentParser parser) throws IOException {
return Transform.PARSER.apply(parser, null);
}
@Override
protected DataFrameTransform createTestInstance() {
return new DataFrameTransform(randomAlphaOfLength(10), randomBoolean() ? null : Version.CURRENT,
protected Transform createTestInstance() {
return new Transform(randomAlphaOfLength(10), randomBoolean() ? null : Version.CURRENT,
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)));
}
@Override
protected Reader<DataFrameTransform> instanceReader() {
return DataFrameTransform::new;
protected Reader<Transform> instanceReader() {
return Transform::new;
}
public void testBackwardsSerialization() throws IOException {
for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
DataFrameTransform transformTask = createTestInstance();
Transform transformTask = createTestInstance();
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(Version.V_7_2_0);
transformTask.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
in.setVersion(Version.V_7_2_0);
// Since the old version does not have the version serialized, the version NOW is 7.2.0
DataFrameTransform streamedTask = new DataFrameTransform(in);
Transform streamedTask = new Transform(in);
assertThat(streamedTask.getVersion(), equalTo(Version.V_7_2_0));
assertThat(streamedTask.getId(), equalTo(transformTask.getId()));
}

View File

@ -19,7 +19,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.xpack.core.transform.transforms.AbstractSerializingDataFrameTestCase;
import org.elasticsearch.xpack.core.transform.transforms.AbstractSerializingTransformTestCase;
import org.elasticsearch.xpack.core.transform.transforms.MockDeprecatedAggregationBuilder;
import org.junit.Before;
@ -29,7 +29,7 @@ import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
public class AggregationConfigTests extends AbstractSerializingDataFrameTestCase<AggregationConfig> {
public class AggregationConfigTests extends AbstractSerializingTransformTestCase<AggregationConfig> {
private boolean lenient;

View File

@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.AbstractSerializingDataFrameTestCase;
import org.elasticsearch.xpack.core.transform.transforms.AbstractSerializingTransformTestCase;
import java.io.IOException;
import java.util.Arrays;
@ -21,7 +21,7 @@ import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
public class PivotConfigTests extends AbstractSerializingDataFrameTestCase<PivotConfig> {
public class PivotConfigTests extends AbstractSerializingTransformTestCase<PivotConfig> {
public static PivotConfig randomPivotConfig() {
return new PivotConfig(GroupConfigTests.randomGroupConfig(),

View File

@ -8,30 +8,30 @@ package org.elasticsearch.xpack.core.transform.utils;
import org.elasticsearch.test.ESTestCase;
public class DataFrameStringsTests extends ESTestCase {
public class TransformStringsTests extends ESTestCase {
public void testValidId() {
assertTrue(DataFrameStrings.isValidId("valid-_id"));
assertTrue(TransformStrings.isValidId("valid-_id"));
}
public void testValidId_givenUppercase() {
assertFalse(DataFrameStrings.isValidId("MiXedCase"));
assertFalse(TransformStrings.isValidId("MiXedCase"));
}
public void testValidId_givenStartsWithUnderScore() {
assertFalse(DataFrameStrings.isValidId("_this_bit_is_ok"));
assertFalse(TransformStrings.isValidId("_this_bit_is_ok"));
}
public void testKasValidLengthForId_givenTooLong() {
StringBuilder sb = new StringBuilder();
for (int i=0; i<DataFrameStrings.ID_LENGTH_LIMIT; i++) {
for (int i=0; i<TransformStrings.ID_LENGTH_LIMIT; i++) {
sb.append('#');
}
assertTrue(DataFrameStrings.hasValidLengthForId(sb.toString()));
assertTrue(TransformStrings.hasValidLengthForId(sb.toString()));
sb.append('#');
assertFalse(DataFrameStrings.hasValidLengthForId(sb.toString()));
assertFalse(TransformStrings.hasValidLengthForId(sb.toString()));
}
}

View File

@ -14,8 +14,8 @@ import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.transform.persistence.DataFrameInternalIndex;
import java.io.IOException;
@ -37,12 +37,12 @@ public class DataFrameConfigurationIndexIT extends DataFrameRestTestCase {
try (XContentBuilder builder = jsonBuilder()) {
builder.startObject();
{
builder.field(DataFrameField.ID.getPreferredName(), fakeTransformName);
builder.field(TransformField.ID.getPreferredName(), fakeTransformName);
}
builder.endObject();
final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
Request req = new Request("PUT",
DataFrameInternalIndex.LATEST_INDEX_NAME + "/_doc/" + DataFrameTransformConfig.documentId(fakeTransformName));
DataFrameInternalIndex.LATEST_INDEX_NAME + "/_doc/" + TransformConfig.documentId(fakeTransformName));
req.setEntity(entity);
client().performRequest(req);
}

View File

@ -8,7 +8,7 @@ package org.elasticsearch.xpack.dataframe.integration;
import org.elasticsearch.client.Request;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.junit.After;
import org.junit.Before;
@ -158,11 +158,11 @@ public class DataFrameGetAndGetStatsIT extends DataFrameRestTestCase {
stopDataFrameTransform("pivot_stats_1", false);
// Get rid of the first transform task, but keep the configuration
client().performRequest(new Request("POST", "_tasks/_cancel?actions="+DataFrameField.TASK_NAME+"*"));
client().performRequest(new Request("POST", "_tasks/_cancel?actions="+TransformField.TASK_NAME+"*"));
// Verify that the task is gone
Map<String, Object> tasks =
entityAsMap(client().performRequest(new Request("GET", "_tasks?actions="+DataFrameField.TASK_NAME+"*")));
entityAsMap(client().performRequest(new Request("GET", "_tasks?actions="+TransformField.TASK_NAME+"*")));
assertTrue(((Map<?, ?>)XContentMapValues.extractValue("nodes", tasks)).isEmpty());
createPivotReviewsTransform("pivot_stats_2", "pivot_reviews_stats_2", null);

View File

@ -19,7 +19,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.transform.persistence.DataFrameInternalIndex;
import org.junit.After;
import org.junit.AfterClass;
@ -44,7 +44,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
protected static final String REVIEWS_INDEX_NAME = "reviews";
protected static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "transforms/";
protected static final String DATAFRAME_ENDPOINT = TransformField.REST_BASE_PATH + "transforms/";
@Override
protected Settings restClientSettings() {
@ -231,7 +231,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
protected void startDataframeTransform(String transformId, boolean force, String authHeader, String... warnings) throws IOException {
// start the transform
final Request startTransformRequest = createRequestWithAuth("POST", DATAFRAME_ENDPOINT + transformId + "/_start", authHeader);
startTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force));
startTransformRequest.addParameter(TransformField.FORCE.getPreferredName(), Boolean.toString(force));
if (warnings.length > 0) {
startTransformRequest.setOptions(expectWarnings(warnings));
}
@ -242,8 +242,8 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
protected void stopDataFrameTransform(String transformId, boolean force) throws Exception {
// start the transform
final Request stopTransformRequest = createRequestWithAuth("POST", DATAFRAME_ENDPOINT + transformId + "/_stop", null);
stopTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force));
stopTransformRequest.addParameter(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(true));
stopTransformRequest.addParameter(TransformField.FORCE.getPreferredName(), Boolean.toString(force));
stopTransformRequest.addParameter(TransformField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(true));
Map<String, Object> stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest));
assertThat(stopTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
}
@ -400,7 +400,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
}
protected static void waitForPendingDataFrameTasks() throws Exception {
waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrameField.TASK_NAME) == false);
waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(TransformField.TASK_NAME) == false);
}
static int getDataFrameCheckpoint(String transformId) throws IOException {

View File

@ -14,7 +14,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;
import org.junit.After;
import org.junit.Before;
@ -66,7 +66,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase {
createContinuousPivotReviewsTransform(transformId, dataFrameIndex, null);
failureTransforms.add(transformId);
startDataframeTransform(transformId, false);
awaitState(transformId, DataFrameTransformStats.State.FAILED);
awaitState(transformId, TransformStats.State.FAILED);
Map<?, ?> fullState = getDataFrameState(transformId);
final String failureReason = "task encountered more than 0 failures; latest failure: " +
"Bulk index experienced failures. See the logs of the node running the transform for details.";
@ -84,7 +84,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase {
// Verify that we can force stop a failed transform
stopDataFrameTransform(transformId, true);
awaitState(transformId, DataFrameTransformStats.State.STOPPED);
awaitState(transformId, TransformStats.State.STOPPED);
fullState = getDataFrameState(transformId);
assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue()));
}
@ -97,7 +97,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase {
createContinuousPivotReviewsTransform(transformId, dataFrameIndex, null);
failureTransforms.add(transformId);
startDataframeTransform(transformId, false);
awaitState(transformId, DataFrameTransformStats.State.FAILED);
awaitState(transformId, TransformStats.State.FAILED);
Map<?, ?> fullState = getDataFrameState(transformId);
final String failureReason = "task encountered more than 0 failures; latest failure: " +
"Bulk index experienced failures. See the logs of the node running the transform for details.";
@ -129,7 +129,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase {
stopDataFrameTransform(transformId, true);
}
private void awaitState(String transformId, DataFrameTransformStats.State state) throws Exception {
private void awaitState(String transformId, TransformStats.State state) throws Exception {
assertBusy(() -> {
String currentState = getDataFrameTransformState(transformId);
assertThat(currentState, equalTo(state.value()));

View File

@ -15,8 +15,8 @@ import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.transform.persistence.DataFrameInternalIndex;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
@ -53,7 +53,7 @@ public class DataFrameTransformInternalIndexIT extends ESRestTestCase {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
builder.startObject("properties");
builder.startObject(DataFrameField.INDEX_DOC_TYPE.getPreferredName()).field("type", "keyword").endObject();
builder.startObject(TransformField.INDEX_DOC_TYPE.getPreferredName()).field("type", "keyword").endObject();
addDataFrameTransformsConfigMappings(builder);
builder.endObject();
builder.endObject();
@ -80,11 +80,11 @@ public class DataFrameTransformInternalIndexIT extends ESRestTestCase {
+ "\"frequency\":\"1s\""
+ "}";
client.index(new IndexRequest(OLD_INDEX)
.id(DataFrameTransformConfig.documentId(transformId))
.id(TransformConfig.documentId(transformId))
.source(config, XContentType.JSON)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT);
GetResponse getResponse = client.get(new GetRequest(OLD_INDEX, DataFrameTransformConfig.documentId(transformId)),
GetResponse getResponse = client.get(new GetRequest(OLD_INDEX, TransformConfig.documentId(transformId)),
RequestOptions.DEFAULT);
assertThat(getResponse.isExists(), is(true));
@ -100,11 +100,11 @@ public class DataFrameTransformInternalIndexIT extends ESRestTestCase {
assertThat(updated.getTransformConfiguration().getDescription(), equalTo("updated"));
// Old should now be gone
getResponse = client.get(new GetRequest(OLD_INDEX, DataFrameTransformConfig.documentId(transformId)), RequestOptions.DEFAULT);
getResponse = client.get(new GetRequest(OLD_INDEX, TransformConfig.documentId(transformId)), RequestOptions.DEFAULT);
assertThat(getResponse.isExists(), is(false));
// New should be here
getResponse = client.get(new GetRequest(CURRENT_INDEX, DataFrameTransformConfig.documentId(transformId)),
getResponse = client.get(new GetRequest(CURRENT_INDEX, TransformConfig.documentId(transformId)),
RequestOptions.DEFAULT);
assertThat(getResponse.isExists(), is(true));
}

View File

@ -23,8 +23,8 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import org.elasticsearch.xpack.core.transform.transforms.QueryConfig;
import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
@ -127,7 +127,7 @@ public class DataFrameTransformProgressIT extends ESRestTestCase {
aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggregationConfig = new AggregationConfig(Collections.emptyMap(), aggs);
PivotConfig pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null);
DataFrameTransformConfig config = new DataFrameTransformConfig("get_progress_transform",
TransformConfig config = new TransformConfig("get_progress_transform",
sourceConfig,
destConfig,
null,
@ -141,7 +141,7 @@ public class DataFrameTransformProgressIT extends ESRestTestCase {
TransformProgressGatherer.getSearchRequest(config, config.getSource().getQueryConfig().getQuery()),
RequestOptions.DEFAULT);
DataFrameTransformProgress progress =
TransformProgress progress =
TransformProgressGatherer.searchResponseToDataFrameTransformProgressFunction().apply(response);
assertThat(progress.getTotalDocs(), equalTo(1000L));
@ -152,7 +152,7 @@ public class DataFrameTransformProgressIT extends ESRestTestCase {
QueryConfig queryConfig = new QueryConfig(Collections.emptyMap(), QueryBuilders.termQuery("user_id", "user_26"));
pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null);
sourceConfig = new SourceConfig(new String[]{REVIEWS_INDEX_NAME}, queryConfig);
config = new DataFrameTransformConfig("get_progress_transform",
config = new TransformConfig("get_progress_transform",
sourceConfig,
destConfig,
null,
@ -172,7 +172,7 @@ public class DataFrameTransformProgressIT extends ESRestTestCase {
histgramGroupConfig = new GroupConfig(Collections.emptyMap(),
Collections.singletonMap("every_50", new HistogramGroupSource("missing_field", 50.0)));
pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null);
config = new DataFrameTransformConfig("get_progress_transform",
config = new TransformConfig("get_progress_transform",
sourceConfig,
destConfig,
null,

View File

@ -9,8 +9,8 @@ package org.elasticsearch.xpack.dataframe.integration;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStoredDoc;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc;
import org.elasticsearch.xpack.transform.persistence.DataFrameInternalIndex;
import org.junit.Before;
@ -20,7 +20,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.xpack.core.transform.DataFrameField.INDEX_DOC_TYPE;
import static org.elasticsearch.xpack.core.transform.TransformField.INDEX_DOC_TYPE;
import static org.elasticsearch.xpack.transform.DataFrameFeatureSet.PROVIDED_STATS;
public class DataFrameUsageIT extends DataFrameRestTestCase {
@ -34,11 +34,11 @@ public class DataFrameUsageIT extends DataFrameRestTestCase {
Response usageResponse = client().performRequest(new Request("GET", "_xpack/usage"));
Map<?, ?> usageAsMap = entityAsMap(usageResponse);
assertTrue((boolean) XContentMapValues.extractValue("data_frame.available", usageAsMap));
assertTrue((boolean) XContentMapValues.extractValue("data_frame.enabled", usageAsMap));
assertTrue((boolean) XContentMapValues.extractValue("transform.available", usageAsMap));
assertTrue((boolean) XContentMapValues.extractValue("transform.enabled", usageAsMap));
// no transforms, no stats
assertEquals(null, XContentMapValues.extractValue("data_frame.transforms", usageAsMap));
assertEquals(null, XContentMapValues.extractValue("data_frame.stats", usageAsMap));
assertEquals(null, XContentMapValues.extractValue("transform.transforms", usageAsMap));
assertEquals(null, XContentMapValues.extractValue("transform.stats", usageAsMap));
// create transforms
createPivotReviewsTransform("test_usage", "pivot_reviews", null);
@ -46,8 +46,8 @@ public class DataFrameUsageIT extends DataFrameRestTestCase {
createContinuousPivotReviewsTransform("test_usage_continuous", "pivot_reviews_continuous", null);
usageResponse = client().performRequest(new Request("GET", "_xpack/usage"));
usageAsMap = entityAsMap(usageResponse);
assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("transform.transforms._all", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("transform.transforms.stopped", usageAsMap));
startAndWaitForTransform("test_usage", "pivot_reviews");
stopDataFrameTransform("test_usage", false);
@ -55,7 +55,7 @@ public class DataFrameUsageIT extends DataFrameRestTestCase {
Request statsExistsRequest = new Request("GET",
DataFrameInternalIndex.LATEST_INDEX_NAME+"/_search?q=" +
INDEX_DOC_TYPE.getPreferredName() + ":" +
DataFrameTransformStoredDoc.NAME);
TransformStoredDoc.NAME);
// Verify that we have one stat document
assertBusy(() -> {
Map<String, Object> hasStatsMap = entityAsMap(client().performRequest(statsExistsRequest));
@ -82,17 +82,17 @@ public class DataFrameUsageIT extends DataFrameRestTestCase {
Response response = client().performRequest(new Request("GET", "_xpack/usage"));
Map<String, Object> statsMap = entityAsMap(response);
// we should see some stats
assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", statsMap));
assertEquals(2, XContentMapValues.extractValue("data_frame.transforms.stopped", statsMap));
assertEquals(1, XContentMapValues.extractValue("data_frame.transforms.started", statsMap));
assertEquals(3, XContentMapValues.extractValue("transform.transforms._all", statsMap));
assertEquals(2, XContentMapValues.extractValue("transform.transforms.stopped", statsMap));
assertEquals(1, XContentMapValues.extractValue("transform.transforms.started", statsMap));
for(String statName : PROVIDED_STATS) {
if (statName.equals(DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName())
||statName.equals(DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName())) {
if (statName.equals(TransformIndexerStats.INDEX_TIME_IN_MS.getPreferredName())
||statName.equals(TransformIndexerStats.SEARCH_TIME_IN_MS.getPreferredName())) {
continue;
}
assertEquals("Incorrect stat " + statName,
expectedStats.get(statName) * 2,
XContentMapValues.extractValue("data_frame.stats." + statName, statsMap));
XContentMapValues.extractValue("transform.stats." + statName, statsMap));
}
// Refresh the index so that statistics are searchable
refreshIndex(DataFrameInternalIndex.LATEST_INDEX_VERSIONED_NAME);
@ -104,7 +104,7 @@ public class DataFrameUsageIT extends DataFrameRestTestCase {
usageResponse = client().performRequest(new Request("GET", "_xpack/usage"));
usageAsMap = entityAsMap(usageResponse);
assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("transform.transforms._all", usageAsMap));
assertEquals(3, XContentMapValues.extractValue("transform.transforms.stopped", usageAsMap));
}
}

View File

@ -43,16 +43,16 @@ import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.scheduler.SchedulerEngine;
import org.elasticsearch.xpack.core.transform.DataFrameNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetDataFrameTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartDataFrameTransformTaskAction;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.UpdateDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsAction;
import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction;
import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction;
import org.elasticsearch.xpack.core.transform.action.PutTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartTransformAction;
import org.elasticsearch.xpack.core.transform.action.StartTransformTaskAction;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction;
import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction;
import org.elasticsearch.xpack.transform.action.TransportDeleteDataFrameTransformAction;
import org.elasticsearch.xpack.transform.action.TransportGetDataFrameTransformsAction;
import org.elasticsearch.xpack.transform.action.TransportGetDataFrameTransformsStatsAction;
@ -154,15 +154,15 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu
}
return Arrays.asList(
new ActionHandler<>(PutDataFrameTransformAction.INSTANCE, TransportPutDataFrameTransformAction.class),
new ActionHandler<>(StartDataFrameTransformAction.INSTANCE, TransportStartDataFrameTransformAction.class),
new ActionHandler<>(StartDataFrameTransformTaskAction.INSTANCE, TransportStartDataFrameTransformTaskAction.class),
new ActionHandler<>(StopDataFrameTransformAction.INSTANCE, TransportStopDataFrameTransformAction.class),
new ActionHandler<>(DeleteDataFrameTransformAction.INSTANCE, TransportDeleteDataFrameTransformAction.class),
new ActionHandler<>(GetDataFrameTransformsAction.INSTANCE, TransportGetDataFrameTransformsAction.class),
new ActionHandler<>(GetDataFrameTransformsStatsAction.INSTANCE, TransportGetDataFrameTransformsStatsAction.class),
new ActionHandler<>(PreviewDataFrameTransformAction.INSTANCE, TransportPreviewDataFrameTransformAction.class),
new ActionHandler<>(UpdateDataFrameTransformAction.INSTANCE, TransportUpdateDataFrameTransformAction.class)
new ActionHandler<>(PutTransformAction.INSTANCE, TransportPutDataFrameTransformAction.class),
new ActionHandler<>(StartTransformAction.INSTANCE, TransportStartDataFrameTransformAction.class),
new ActionHandler<>(StartTransformTaskAction.INSTANCE, TransportStartDataFrameTransformTaskAction.class),
new ActionHandler<>(StopTransformAction.INSTANCE, TransportStopDataFrameTransformAction.class),
new ActionHandler<>(DeleteTransformAction.INSTANCE, TransportDeleteDataFrameTransformAction.class),
new ActionHandler<>(GetTransformsAction.INSTANCE, TransportGetDataFrameTransformsAction.class),
new ActionHandler<>(GetTransformsStatsAction.INSTANCE, TransportGetDataFrameTransformsStatsAction.class),
new ActionHandler<>(PreviewTransformAction.INSTANCE, TransportPreviewDataFrameTransformAction.class),
new ActionHandler<>(UpdateTransformAction.INSTANCE, TransportUpdateDataFrameTransformAction.class)
);
}
@ -251,6 +251,6 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu
@Override
public List<Entry> getNamedXContent() {
return new DataFrameNamedXContentProvider().getNamedXContentParsers();
return new TransformNamedXContentProvider().getNamedXContentParsers();
}
}

View File

@ -29,14 +29,14 @@ import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.XPackFeatureSet;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.transform.DataFrameFeatureSetUsage;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameIndexerTransformStats;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransform;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformState;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformStoredDoc;
import org.elasticsearch.xpack.core.transform.transforms.DataFrameTransformTaskState;
import org.elasticsearch.xpack.core.transform.TransformFeatureSetUsage;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.Transform;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformState;
import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc;
import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState;
import org.elasticsearch.xpack.transform.persistence.DataFrameInternalIndex;
import java.util.ArrayList;
@ -59,16 +59,16 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
private static final Logger logger = LogManager.getLogger(DataFrameFeatureSet.class);
public static final String[] PROVIDED_STATS = new String[] {
DataFrameIndexerTransformStats.NUM_PAGES.getPreferredName(),
DataFrameIndexerTransformStats.NUM_INPUT_DOCUMENTS.getPreferredName(),
DataFrameIndexerTransformStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(),
DataFrameIndexerTransformStats.NUM_INVOCATIONS.getPreferredName(),
DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName(),
DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName(),
DataFrameIndexerTransformStats.INDEX_TOTAL.getPreferredName(),
DataFrameIndexerTransformStats.SEARCH_TOTAL.getPreferredName(),
DataFrameIndexerTransformStats.INDEX_FAILURES.getPreferredName(),
DataFrameIndexerTransformStats.SEARCH_FAILURES.getPreferredName(),
TransformIndexerStats.NUM_PAGES.getPreferredName(),
TransformIndexerStats.NUM_INPUT_DOCUMENTS.getPreferredName(),
TransformIndexerStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(),
TransformIndexerStats.NUM_INVOCATIONS.getPreferredName(),
TransformIndexerStats.INDEX_TIME_IN_MS.getPreferredName(),
TransformIndexerStats.SEARCH_TIME_IN_MS.getPreferredName(),
TransformIndexerStats.INDEX_TOTAL.getPreferredName(),
TransformIndexerStats.SEARCH_TOTAL.getPreferredName(),
TransformIndexerStats.INDEX_FAILURES.getPreferredName(),
TransformIndexerStats.SEARCH_FAILURES.getPreferredName(),
};
@Inject
@ -81,7 +81,7 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
@Override
public String name() {
return XPackField.DATA_FRAME;
return XPackField.Transform;
}
@Override
@ -102,26 +102,26 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
@Override
public void usage(ActionListener<XPackFeatureSet.Usage> listener) {
if (enabled == false) {
listener.onResponse(new DataFrameFeatureSetUsage(available(),
listener.onResponse(new TransformFeatureSetUsage(available(),
enabled(),
Collections.emptyMap(),
new DataFrameIndexerTransformStats()));
new TransformIndexerStats()));
return;
}
PersistentTasksCustomMetaData taskMetadata = PersistentTasksCustomMetaData.getPersistentTasksCustomMetaData(clusterService.state());
Collection<PersistentTasksCustomMetaData.PersistentTask<?>> dataFrameTasks = taskMetadata == null ?
Collections.emptyList() :
taskMetadata.findTasks(DataFrameTransform.NAME, (t) -> true);
taskMetadata.findTasks(Transform.NAME, (t) -> true);
final int taskCount = dataFrameTasks.size();
final Map<String, Long> transformsCountByState = new HashMap<>();
for(PersistentTasksCustomMetaData.PersistentTask<?> dataFrameTask : dataFrameTasks) {
DataFrameTransformState state = (DataFrameTransformState)dataFrameTask.getState();
TransformState state = (TransformState)dataFrameTask.getState();
transformsCountByState.merge(state.getTaskState().value(), 1L, Long::sum);
}
ActionListener<DataFrameIndexerTransformStats> totalStatsListener = ActionListener.wrap(
statSummations -> listener.onResponse(new DataFrameFeatureSetUsage(available(),
ActionListener<TransformIndexerStats> totalStatsListener = ActionListener.wrap(
statSummations -> listener.onResponse(new TransformFeatureSetUsage(available(),
enabled(),
transformsCountByState,
statSummations)),
@ -136,13 +136,13 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
}
long totalTransforms = transformCountSuccess.getHits().getTotalHits().value;
if (totalTransforms == 0) {
listener.onResponse(new DataFrameFeatureSetUsage(available(),
listener.onResponse(new TransformFeatureSetUsage(available(),
enabled(),
transformsCountByState,
new DataFrameIndexerTransformStats()));
new TransformIndexerStats()));
return;
}
transformsCountByState.merge(DataFrameTransformTaskState.STOPPED.value(), totalTransforms - taskCount, Long::sum);
transformsCountByState.merge(TransformTaskState.STOPPED.value(), totalTransforms - taskCount, Long::sum);
getStatisticSummations(client, totalStatsListener);
},
transformCountFailure -> {
@ -157,7 +157,7 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
SearchRequest totalTransformCount = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME_PATTERN)
.setTrackTotalHits(true)
.setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameTransformConfig.NAME))))
.filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformConfig.NAME))))
.request();
ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(),
@ -167,7 +167,7 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
client::search);
}
static DataFrameIndexerTransformStats parseSearchAggs(SearchResponse searchResponse) {
static TransformIndexerStats parseSearchAggs(SearchResponse searchResponse) {
List<Long> statisticsList = new ArrayList<>(PROVIDED_STATS.length);
for(String statName : PROVIDED_STATS) {
@ -179,7 +179,7 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
statisticsList.add(0L);
}
}
return new DataFrameIndexerTransformStats(statisticsList.get(0), // numPages
return new TransformIndexerStats(statisticsList.get(0), // numPages
statisticsList.get(1), // numInputDocuments
statisticsList.get(2), // numOutputDocuments
statisticsList.get(3), // numInvocations
@ -191,16 +191,16 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
statisticsList.get(9)); // searchFailures
}
static void getStatisticSummations(Client client, ActionListener<DataFrameIndexerTransformStats> statsListener) {
static void getStatisticSummations(Client client, ActionListener<TransformIndexerStats> statsListener) {
QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(),
DataFrameTransformStoredDoc.NAME)));
.filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(),
TransformStoredDoc.NAME)));
SearchRequestBuilder requestBuilder = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME_PATTERN)
.setSize(0)
.setQuery(queryBuilder);
final String path = DataFrameField.STATS_FIELD.getPreferredName() + ".";
final String path = TransformField.STATS_FIELD.getPreferredName() + ".";
for(String statName : PROVIDED_STATS) {
requestBuilder.addAggregation(AggregationBuilders.sum(statName).field(path + statName));
}
@ -216,7 +216,7 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
},
failure -> {
if (failure instanceof ResourceNotFoundException) {
statsListener.onResponse(new DataFrameIndexerTransformStats());
statsListener.onResponse(new TransformIndexerStats());
} else {
statsListener.onFailure(failure);
}

View File

@ -8,7 +8,7 @@ package org.elasticsearch.xpack.transform.action;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.xpack.core.transform.DataFrameField;
import org.elasticsearch.xpack.core.transform.TransformField;
import java.util.Collection;
import java.util.HashSet;
@ -38,7 +38,7 @@ public final class DataFrameNodes {
Set<String> dataFrameIdsSet = new HashSet<>(dataFrameIds);
Collection<PersistentTasksCustomMetaData.PersistentTask<?>> tasks =
tasksMetaData.findTasks(DataFrameField.TASK_NAME, t -> dataFrameIdsSet.contains(t.getId()));
tasksMetaData.findTasks(TransformField.TASK_NAME, t -> dataFrameIdsSet.contains(t.getId()));
for (PersistentTasksCustomMetaData.PersistentTask<?> task : tasks) {
executorNodes.add(task.getExecutorNode());

View File

@ -22,9 +22,9 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.StopDataFrameTransformAction;
import org.elasticsearch.xpack.core.transform.action.DeleteDataFrameTransformAction.Request;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction;
import org.elasticsearch.xpack.core.transform.action.StopTransformAction;
import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction.Request;
import org.elasticsearch.xpack.transform.notifications.DataFrameAuditor;
import org.elasticsearch.xpack.transform.persistence.DataFrameTransformsConfigManager;
@ -44,7 +44,7 @@ public class TransportDeleteDataFrameTransformAction extends TransportMasterNode
ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
DataFrameTransformsConfigManager transformsConfigManager, DataFrameAuditor auditor,
Client client) {
super(DeleteDataFrameTransformAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(DeleteTransformAction.NAME, transportService, clusterService, threadPool, actionFilters,
Request::new, indexNameExpressionResolver);
this.transformsConfigManager = transformsConfigManager;
this.auditor = auditor;
@ -83,8 +83,8 @@ public class TransportDeleteDataFrameTransformAction extends TransportMasterNode
if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) {
executeAsyncWithOrigin(client,
DATA_FRAME_ORIGIN,
StopDataFrameTransformAction.INSTANCE,
new StopDataFrameTransformAction.Request(request.getId(), true, true, null, true),
StopTransformAction.INSTANCE,
new StopTransformAction.Request(request.getId(), true, true, null, true),
ActionListener.wrap(
r -> stopTransformActionListener.onResponse(null),
stopTransformActionListener::onFailure));

Some files were not shown because too many files have changed in this diff Show More