Remove average processing time mapping (elastic/elasticsearch#402)

Original commit: elastic/x-pack-elasticsearch@97fdeaf748
This commit is contained in:
David Kyle 2016-11-28 12:48:14 +00:00 committed by GitHub
parent 39fe1b7b09
commit 3362d5c965
4 changed files with 0 additions and 33 deletions

View File

@ -227,7 +227,6 @@ public class ElasticsearchJobProvider implements JobProvider
XContentBuilder modelSnapshotMapping = ElasticsearchMappings.modelSnapshotMapping();
XContentBuilder modelSizeStatsMapping = ElasticsearchMappings.modelSizeStatsMapping();
XContentBuilder modelDebugMapping = ElasticsearchMappings.modelDebugOutputMapping(termFields);
XContentBuilder processingTimeMapping = ElasticsearchMappings.processingTimeMapping();
XContentBuilder partitionScoreMapping = ElasticsearchMappings.bucketPartitionMaxNormalizedScores();
XContentBuilder dataCountsMapping = ElasticsearchMappings.dataCountsMapping();
@ -243,7 +242,6 @@ public class ElasticsearchJobProvider implements JobProvider
createIndexRequest.mapping(ModelSnapshot.TYPE.getPreferredName(), modelSnapshotMapping);
createIndexRequest.mapping(ModelSizeStats.TYPE.getPreferredName(), modelSizeStatsMapping);
createIndexRequest.mapping(ModelDebugOutput.TYPE.getPreferredName(), modelDebugMapping);
createIndexRequest.mapping(ReservedFieldNames.BUCKET_PROCESSING_TIME_TYPE, processingTimeMapping);
createIndexRequest.mapping(ReservedFieldNames.PARTITION_NORMALIZED_PROB_TYPE, partitionScoreMapping);
createIndexRequest.mapping(DataCounts.TYPE.getPreferredName(), dataCountsMapping);

View File

@ -784,24 +784,4 @@ public class ElasticsearchMappings {
.endObject()
.endObject();
}
// Builds the Elasticsearch type mapping for the "bucketProcessingTime" type:
// disables the _all field and maps averageProcessingTimeMs as a double.
// NOTE(review): this method is being deleted by this commit (the processing
// time mapping is removed); shown here as removal context only.
public static XContentBuilder processingTimeMapping() throws IOException {
return jsonBuilder()
.startObject()
.startObject(ReservedFieldNames.BUCKET_PROCESSING_TIME_TYPE)
.startObject(ALL)
.field(ENABLED, false)
// analyzer must be specified even though _all is disabled
// because all types in the same index must have the same
// analyzer for a given field
.field(ANALYZER, WHITESPACE)
.endObject()
.startObject(PROPERTIES)
// single mapped field: the bucket's average processing time in ms
.startObject(ReservedFieldNames.AVERAGE_PROCESSING_TIME_MS)
.field(TYPE, DOUBLE)
.endObject()
.endObject()
.endObject()
.endObject();
}
}

View File

@ -34,9 +34,6 @@ public final class ReservedFieldNames {
*/
private static final String ES_TIMESTAMP = "timestamp";
public static final String BUCKET_PROCESSING_TIME_TYPE = "bucketProcessingTime";
public static final String AVERAGE_PROCESSING_TIME_MS = "averageProcessingTimeMs";
public static final String PARTITION_NORMALIZED_PROB_TYPE = "partitionNormalizedProb";
public static final String PARTITION_NORMALIZED_PROBS = "partitionNormalizedProbs";
@ -98,8 +95,6 @@ public final class ReservedFieldNames {
BucketInfluencer.INITIAL_ANOMALY_SCORE.getPreferredName(), BucketInfluencer.ANOMALY_SCORE.getPreferredName(),
BucketInfluencer.RAW_ANOMALY_SCORE.getPreferredName(), BucketInfluencer.PROBABILITY.getPreferredName(),
AVERAGE_PROCESSING_TIME_MS,
PARTITION_NORMALIZED_PROBS,
PARTITION_NORMALIZED_PROB_TYPE,

View File

@ -87,7 +87,6 @@ public class ElasticsearchMappingsTests extends ESTestCase {
overridden.add(AuditActivity.TYPE.getPreferredName());
overridden.add(AuditMessage.TYPE.getPreferredName());
overridden.add(DataCounts.TYPE.getPreferredName());
overridden.add(ReservedFieldNames.BUCKET_PROCESSING_TIME_TYPE);
overridden.add(CategorizerState.TYPE);
overridden.add(CategoryDefinition.TYPE.getPreferredName());
overridden.add(Job.TYPE);
@ -169,11 +168,6 @@ public class ElasticsearchMappingsTests extends ESTestCase {
parser = new JsonFactory().createParser(inputStream);
parseJson(parser, expected);
builder = ElasticsearchMappings.processingTimeMapping();
inputStream = new BufferedInputStream(new ByteArrayInputStream(builder.string().getBytes(StandardCharsets.UTF_8)));
parser = new JsonFactory().createParser(inputStream);
parseJson(parser, expected);
builder = ElasticsearchMappings.quantilesMapping();
inputStream = new BufferedInputStream(new ByteArrayInputStream(builder.string().getBytes(StandardCharsets.UTF_8)));
parser = new JsonFactory().createParser(inputStream);