add timeseries policy action stubs (#3214)

Tal Levy 2017-12-05 09:09:18 -08:00 committed by GitHub
parent 685231e786
commit d847355b58
15 changed files with 742 additions and 12 deletions
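The five new action classes below (allocate, forcemerge, replicas, rollover, shrink) share the same stub shape: a no-arg constructor, empty wire and XContent serialization, and a placeholder execute() that immediately reports success. As a rough sketch of how the stubs are meant to compose, based on the construction pattern used in TimeseriesLifecyclePolicyTests further down; the sketch itself, its class name, the phase timings and the policy name are illustrative and not part of this commit:

package org.elasticsearch.xpack.indexlifecycle;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.unit.TimeValue;

class TimeseriesPolicySketch {
    static TimeseriesLifecyclePolicy examplePolicy() {
        Map<String, Phase> phases = new HashMap<>();
        // "hot" only permits rollover (see VALID_HOT_ACTIONS below)
        phases.put("hot", new Phase("hot", TimeValue.ZERO,
                Collections.singletonList(new RolloverAction())));
        // "warm" permits allocate, replicas, shrink and forcemerge
        phases.put("warm", new Phase("warm", TimeValue.ZERO,
                Arrays.asList(new AllocateAction(), new ShrinkAction(), new ForceMergeAction())));
        // "delete" only permits the (pre-existing) delete action
        phases.put("delete", new Phase("delete", TimeValue.ZERO,
                Collections.singletonList(new DeleteAction())));
        // the constructor runs validate(), rejecting unknown phases and
        // actions that are not allowed in the phase they appear in
        return new TimeseriesLifecyclePolicy("example-policy", phases);
    }
}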

View File: AllocateAction.java

@@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import java.io.IOException;
/**
* A {@link LifecycleAction} which reroutes shards from one allocation to another.
*/
public class AllocateAction implements LifecycleAction {
public static final String NAME = "allocate";
private static final Logger logger = ESLoggerFactory.getLogger(AllocateAction.class);
private static final ObjectParser<AllocateAction, Void> PARSER = new ObjectParser<>(NAME, AllocateAction::new);
public static AllocateAction parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
public AllocateAction() {
}
public AllocateAction(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public void execute(Index index, Client client, Listener listener) {
// nocommit: stub
listener.onSuccess(true);
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public String toString() {
return Strings.toString(this);
}
}
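Each stub's execute(Index, Client, Listener) simply signals completion through the listener. A minimal sketch of driving one of these stubs, assuming the Listener in the signature is a nested LifecycleAction.Listener interface with the onSuccess(boolean)/onFailure(Exception) callbacks the stubs invoke; the interface name and the index/client variables are assumptions, not part of this diff:

// index (org.elasticsearch.index.Index) and client (org.elasticsearch.client.Client)
// are assumed to be available to the caller.
new AllocateAction().execute(index, client, new LifecycleAction.Listener() {
    @Override
    public void onSuccess(boolean completed) {
        // the stub always reports completed == true
    }

    @Override
    public void onFailure(Exception e) {
        // never reached by the stub implementation
    }
});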

View File: ForceMergeAction.java

@@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import java.io.IOException;
/**
* A {@link LifecycleAction} which force-merges the index's shards to a specific number of segments.
*/
public class ForceMergeAction implements LifecycleAction {
public static final String NAME = "forcemerge";
private static final Logger logger = ESLoggerFactory.getLogger(ForceMergeAction.class);
private static final ObjectParser<ForceMergeAction, Void> PARSER = new ObjectParser<>(NAME, ForceMergeAction::new);
public static ForceMergeAction parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
public ForceMergeAction() {
}
public ForceMergeAction(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public void execute(Index index, Client client, Listener listener) {
// nocommit: stub
listener.onSuccess(true);
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public String toString() {
return Strings.toString(this);
}
}

View File: IndexLifecycle.java

@@ -119,6 +119,11 @@ public class IndexLifecycle extends Plugin {
TimeseriesLifecyclePolicy::new),
// Lifecycle actions
new NamedWriteableRegistry.Entry(LifecycleAction.class, AllocateAction.NAME, AllocateAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ReplicasAction.NAME, ReplicasAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new));
}
@@ -131,6 +136,10 @@ public class IndexLifecycle extends Plugin {
// Lifecycle Policy
new NamedXContentRegistry.Entry(LifecyclePolicy.class, new ParseField(TimeseriesLifecyclePolicy.TYPE), (p, c) -> TimeseriesLifecyclePolicy.parse(p, c)),
// Lifecycle actions
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(AllocateAction.NAME), AllocateAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReplicasAction.NAME), ReplicasAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse));
}
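Registering each action with both the NamedWriteableRegistry and the NamedXContentRegistry is what allows a policy's actions to round-trip by name, over the wire and through XContent. A rough sketch of the wire round-trip using the stock stream helpers (BytesStreamOutput and NamedWriteableAwareStreamInput); the helper choice is an illustrative assumption, not code touched by this commit:

// Fragment; throws IOException. Classes come from org.elasticsearch.common.io.stream.
NamedWriteableRegistry registry = new NamedWriteableRegistry(Arrays.asList(
        new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new)));
BytesStreamOutput out = new BytesStreamOutput();
out.writeNamedWriteable(new ShrinkAction());   // writes the "shrink" name plus an empty body
StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
LifecycleAction action = in.readNamedWriteable(LifecycleAction.class);   // a ShrinkAction again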

View File: LifecyclePolicy.java

@@ -47,12 +47,6 @@ public abstract class LifecyclePolicy extends AbstractDiffable<LifecyclePolicy>
public static final ParseField PHASES_FIELD = new ParseField("phases");
public static final ParseField TYPE_FIELD = new ParseField("type");
// public static LifecyclePolicy parse(XContentParser parser, Tuple<String, NamedXContentRegistry> context) {
// parser.getXContentRegistry().parseNamedObject()
// Map<String, Object> map = PARSER.apply(parser, context);
// return context.v2().parseNamedObject(LifecyclePolicy.class, map.get("lifecycle_type"), parser, context.v2());
// }
protected final String name;
protected final Map<String, Phase> phases;

View File: ReplicasAction.java

@@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import java.io.IOException;
/**
* A {@link LifecycleAction} that changes the number of replicas for the index.
*/
public class ReplicasAction implements LifecycleAction {
public static final String NAME = "replicas";
private static final Logger logger = ESLoggerFactory.getLogger(ReplicasAction.class);
private static final ObjectParser<ReplicasAction, Void> PARSER = new ObjectParser<>(NAME, ReplicasAction::new);
public static ReplicasAction parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
public ReplicasAction() {
}
public ReplicasAction(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public void execute(Index index, Client client, Listener listener) {
// nocommit: stub
listener.onSuccess(true);
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public String toString() {
return Strings.toString(this);
}
}

View File: RolloverAction.java

@@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import java.io.IOException;
/**
* A {@link LifecycleAction} which rolls over the index.
*/
public class RolloverAction implements LifecycleAction {
public static final String NAME = "rollover";
private static final Logger logger = ESLoggerFactory.getLogger(RolloverAction.class);
private static final ObjectParser<RolloverAction, Void> PARSER = new ObjectParser<>(NAME, RolloverAction::new);
public static RolloverAction parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
public RolloverAction() {
}
public RolloverAction(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public void execute(Index index, Client client, Listener listener) {
// nocommit: stub
listener.onSuccess(true);
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public String toString() {
return Strings.toString(this);
}
}

View File: ShrinkAction.java

@@ -0,0 +1,97 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import java.io.IOException;
/**
* A {@link LifecycleAction} which shrinks the index.
*/
public class ShrinkAction implements LifecycleAction {
public static final String NAME = "shrink";
private static final Logger logger = ESLoggerFactory.getLogger(ShrinkAction.class);
private static final ObjectParser<ShrinkAction, Void> PARSER = new ObjectParser<>(NAME, ShrinkAction::new);
public static ShrinkAction parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
public ShrinkAction() {
}
public ShrinkAction(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public void execute(Index index, Client client, Listener listener) {
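// nocommit: placeholder only; this stub currently deletes the index rather than shrinking it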
client.admin().indices().delete(new DeleteIndexRequest(index.getName()), new ActionListener<DeleteIndexResponse>() {
@Override
public void onResponse(DeleteIndexResponse deleteIndexResponse) {
logger.error(deleteIndexResponse);
listener.onSuccess(true);
}
@Override
public void onFailure(Exception e) {
logger.error(e);
listener.onFailure(e);
}
});
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public String toString() {
return Strings.toString(this);
}
}

View File: TimeseriesLifecyclePolicy.java

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.indexlifecycle;
import com.google.common.collect.Maps;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@@ -28,6 +30,11 @@ import java.util.Set;
public class TimeseriesLifecyclePolicy extends LifecyclePolicy {
public static final String TYPE = "timeseries";
static final List<String> VALID_PHASES = Arrays.asList("hot", "warm", "cold", "delete");
static final Set<String> VALID_HOT_ACTIONS = Sets.newHashSet(RolloverAction.NAME);
static final Set<String> VALID_WARM_ACTIONS = Sets.newHashSet(AllocateAction.NAME, ReplicasAction.NAME,
ShrinkAction.NAME, ForceMergeAction.NAME);
static final Set<String> VALID_COLD_ACTIONS = Sets.newHashSet(AllocateAction.NAME, ReplicasAction.NAME);
static final Set<String> VALID_DELETE_ACTIONS = Sets.newHashSet(DeleteAction.NAME);
/**
* @param name
@@ -95,10 +102,21 @@ public class TimeseriesLifecyclePolicy extends LifecyclePolicy {
@Override
public void validate(Collection<Phase> phases) {
Set<String> allowedPhases = new HashSet<>(VALID_PHASES);
Map<String, Set<String>> allowedActions = Maps.newHashMapWithExpectedSize(allowedPhases.size());
allowedActions.put("hot", VALID_HOT_ACTIONS);
allowedActions.put("warm", VALID_WARM_ACTIONS);
allowedActions.put("cold", VALID_COLD_ACTIONS);
allowedActions.put("delete", VALID_DELETE_ACTIONS);
phases.forEach(phase -> {
if (allowedPhases.contains(phase.getName()) == false) {
throw new IllegalArgumentException("Timeseries lifecycle does not support phase [" + phase.getName() + "]");
}
phase.getActions().forEach(action -> {
if (allowedActions.get(phase.getName()).contains(action.getWriteableName()) == false) {
throw new IllegalArgumentException("invalid action [" + action.getWriteableName() + "] " +
"defined in phase [" + phase.getName() + "]");
}
});
});
}
}
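With the per-phase whitelists above, a misplaced action is rejected as soon as the policy is constructed. A small sketch of that behaviour, mirroring the tests below (the policy name is an arbitrary example value):

// rollover is only valid in the "hot" phase, so putting it in "warm" trips
// validate() inside the TimeseriesLifecyclePolicy constructor
Map<String, Phase> phases = Collections.singletonMap("warm",
        new Phase("warm", TimeValue.ZERO, Collections.singletonList(new RolloverAction())));
// throws IllegalArgumentException: invalid action [rollover] defined in phase [warm]
new TimeseriesLifecyclePolicy("example-policy", phases);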

View File: AllocateActionTests.java

@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class AllocateActionTests extends AbstractSerializingTestCase<AllocateAction> {
@Override
protected AllocateAction doParseInstance(XContentParser parser) throws IOException {
return AllocateAction.parse(parser);
}
@Override
protected AllocateAction createTestInstance() {
return new AllocateAction();
}
@Override
protected Reader<AllocateAction> instanceReader() {
return AllocateAction::new;
}
}

View File: ForceMergeActionTests.java

@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class ForceMergeActionTests extends AbstractSerializingTestCase<ForceMergeAction> {
@Override
protected ForceMergeAction doParseInstance(XContentParser parser) throws IOException {
return ForceMergeAction.parse(parser);
}
@Override
protected ForceMergeAction createTestInstance() {
return new ForceMergeAction();
}
@Override
protected Reader<ForceMergeAction> instanceReader() {
return ForceMergeAction::new;
}
}

View File: ReplicasActionTests.java

@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class ReplicasActionTests extends AbstractSerializingTestCase<ReplicasAction> {
@Override
protected ReplicasAction doParseInstance(XContentParser parser) throws IOException {
return ReplicasAction.parse(parser);
}
@Override
protected ReplicasAction createTestInstance() {
return new ReplicasAction();
}
@Override
protected Reader<ReplicasAction> instanceReader() {
return ReplicasAction::new;
}
}

View File: RolloverActionTests.java

@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class RolloverActionTests extends AbstractSerializingTestCase<RolloverAction> {
@Override
protected RolloverAction doParseInstance(XContentParser parser) throws IOException {
return RolloverAction.parse(parser);
}
@Override
protected RolloverAction createTestInstance() {
return new RolloverAction();
}
@Override
protected Reader<RolloverAction> instanceReader() {
return RolloverAction::new;
}
}

View File: ShrinkActionTests.java

@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.indexlifecycle;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class ShrinkActionTests extends AbstractSerializingTestCase<ShrinkAction> {
@Override
protected ShrinkAction doParseInstance(XContentParser parser) throws IOException {
return ShrinkAction.parse(parser);
}
@Override
protected ShrinkAction createTestInstance() {
return new ShrinkAction();
}
@Override
protected Reader<ShrinkAction> instanceReader() {
return ShrinkAction::new;
}
}

View File: TimeseriesLifecyclePolicyTests.java

@@ -16,28 +16,43 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.Before;
import java.io.IOException;
import java.sql.Time;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy.VALID_COLD_ACTIONS;
import static org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy.VALID_DELETE_ACTIONS;
import static org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy.VALID_HOT_ACTIONS;
import static org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy.VALID_PHASES;
import static org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy.VALID_WARM_ACTIONS;
import static org.hamcrest.Matchers.equalTo;
public class TimeseriesLifecyclePolicyTests extends AbstractSerializingTestCase<LifecyclePolicy> {
private NamedXContentRegistry registry;
private String lifecycleName;
private static final AllocateAction TEST_ALLOCATE_ACTION = new AllocateAction();
private static final DeleteAction TEST_DELETE_ACTION = new DeleteAction();
private static final ForceMergeAction TEST_FORCE_MERGE_ACTION = new ForceMergeAction();
private static final ReplicasAction TEST_REPLICAS_ACTION = new ReplicasAction();
private static final RolloverAction TEST_ROLLOVER_ACTION = new RolloverAction();
private static final ShrinkAction TEST_SHRINK_ACTION = new ShrinkAction();
@Before
public void setup() {
List<NamedXContentRegistry.Entry> entries = Arrays
.asList(new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse),
new NamedXContentRegistry.Entry(LifecyclePolicy.class, new ParseField(TimeseriesLifecyclePolicy.TYPE),
TimeseriesLifecyclePolicy::parse));
TimeseriesLifecyclePolicy::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(AllocateAction.NAME), AllocateAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReplicasAction.NAME), ReplicasAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse));
registry = new NamedXContentRegistry(entries);
lifecycleName = randomAlphaOfLength(20); // NOCOMMIT we need to randomise the lifecycle name rather
// than use the same name for all instances
@@ -60,7 +75,12 @@ public class TimeseriesLifecyclePolicyTests extends AbstractSerializingTestCase<
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(
Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new)));
Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, AllocateAction.NAME, AllocateAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ReplicasAction.NAME, ReplicasAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new),
new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new)));
}
public void testGetFirstPhase() {
@@ -101,7 +121,7 @@ public class TimeseriesLifecyclePolicyTests extends AbstractSerializingTestCase<
}
}
public void testValidate() {
public void testValidatePhases() {
boolean invalid = randomBoolean();
String phaseName = randomFrom("hot", "warm", "cold", "delete");
if (invalid) {
@@ -116,4 +136,107 @@ public class TimeseriesLifecyclePolicyTests extends AbstractSerializingTestCase<
new TimeseriesLifecyclePolicy(lifecycleName, phases);
}
}
public void testValidateHotPhase() {
LifecycleAction invalidAction = null;
List<LifecycleAction> actions = randomSubsetOf(VALID_HOT_ACTIONS)
.stream().map(this::getTestAction).collect(Collectors.toList());
if (randomBoolean()) {
invalidAction = getTestAction(randomFrom("allocate", "forcemerge", "delete", "replicas", "shrink"));
actions.add(invalidAction);
}
Map<String, Phase> hotPhase = Collections.singletonMap("hot",
new Phase("hot", TimeValue.ZERO, actions));
if (invalidAction != null) {
Exception e = expectThrows(IllegalArgumentException.class,
() -> new TimeseriesLifecyclePolicy(lifecycleName, hotPhase));
assertThat(e.getMessage(),
equalTo("invalid action [" + invalidAction.getWriteableName() + "] defined in phase [hot]"));
} else {
new TimeseriesLifecyclePolicy(lifecycleName, hotPhase);
}
}
public void testValidateWarmPhase() {
LifecycleAction invalidAction = null;
List<LifecycleAction> actions = randomSubsetOf(VALID_WARM_ACTIONS)
.stream().map(this::getTestAction).collect(Collectors.toList());
if (randomBoolean()) {
invalidAction = getTestAction(randomFrom("rollover", "delete"));
actions.add(invalidAction);
}
Map<String, Phase> warmPhase = Collections.singletonMap("warm",
new Phase("warm", TimeValue.ZERO, actions));
if (invalidAction != null) {
Exception e = expectThrows(IllegalArgumentException.class,
() -> new TimeseriesLifecyclePolicy(lifecycleName, warmPhase));
assertThat(e.getMessage(),
equalTo("invalid action [" + invalidAction.getWriteableName() + "] defined in phase [warm]"));
} else {
new TimeseriesLifecyclePolicy(lifecycleName, warmPhase);
}
}
public void testValidateColdPhase() {
LifecycleAction invalidAction = null;
List<LifecycleAction> actions = randomSubsetOf(VALID_COLD_ACTIONS)
.stream().map(this::getTestAction).collect(Collectors.toList());
if (randomBoolean()) {
invalidAction = getTestAction(randomFrom("rollover", "delete", "forcemerge", "shrink"));
actions.add(invalidAction);
}
Map<String, Phase> coldPhase = Collections.singletonMap("cold",
new Phase("cold", TimeValue.ZERO, actions));
if (invalidAction != null) {
Exception e = expectThrows(IllegalArgumentException.class,
() -> new TimeseriesLifecyclePolicy(lifecycleName, coldPhase));
assertThat(e.getMessage(),
equalTo("invalid action [" + invalidAction.getWriteableName() + "] defined in phase [cold]"));
} else {
new TimeseriesLifecyclePolicy(lifecycleName, coldPhase);
}
}
public void testValidateDeletePhase() {
LifecycleAction invalidAction = null;
List<LifecycleAction> actions = randomSubsetOf(VALID_DELETE_ACTIONS)
.stream().map(this::getTestAction).collect(Collectors.toList());
if (randomBoolean()) {
invalidAction = getTestAction(randomFrom("allocate", "rollover", "replicas", "forcemerge", "shrink"));
actions.add(invalidAction);
}
Map<String, Phase> deletePhase = Collections.singletonMap("delete",
new Phase("delete", TimeValue.ZERO, actions));
if (invalidAction != null) {
Exception e = expectThrows(IllegalArgumentException.class,
() -> new TimeseriesLifecyclePolicy(lifecycleName, deletePhase));
assertThat(e.getMessage(),
equalTo("invalid action [" + invalidAction.getWriteableName() + "] defined in phase [delete]"));
} else {
new TimeseriesLifecyclePolicy(lifecycleName, deletePhase);
}
}
private LifecycleAction getTestAction(String actionName) {
switch (actionName) {
case AllocateAction.NAME:
return TEST_ALLOCATE_ACTION;
case DeleteAction.NAME:
return TEST_DELETE_ACTION;
case ForceMergeAction.NAME:
return TEST_FORCE_MERGE_ACTION;
case ReplicasAction.NAME:
return TEST_REPLICAS_ACTION;
case RolloverAction.NAME:
return TEST_ROLLOVER_ACTION;
case ShrinkAction.NAME:
return TEST_SHRINK_ACTION;
default:
throw new IllegalArgumentException("unsupported timeseries phase action [" + actionName + "]");
}
}
}

View File: GetLifecycleResponseTests.java

@@ -14,7 +14,6 @@ import org.elasticsearch.xpack.indexlifecycle.LifecycleAction;
import org.elasticsearch.xpack.indexlifecycle.LifecyclePolicy;
import org.elasticsearch.xpack.indexlifecycle.Phase;
import org.elasticsearch.xpack.indexlifecycle.TestLifecyclePolicy;
import org.elasticsearch.xpack.indexlifecycle.TimeseriesLifecyclePolicy;
import org.elasticsearch.xpack.indexlifecycle.action.GetLifecycleAction.Response;
import org.junit.Before;
@@ -22,7 +21,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class GetLifecycleResponseTests extends AbstractStreamableTestCase<GetLifecycleAction.Response> {