Adds settings, phase and action objects which control how and when things are executed
This commit is contained in:
parent
3455445d2c
commit
c393944dc0
|
@ -0,0 +1,15 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.indexlifecycle;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.ToXContentObject;

/**
 * Base type for a single step executed as part of an index lifecycle phase
 * (see {@code Phase}). Implementations perform their work against the cluster
 * via the supplied {@link Client} and can render themselves as XContent.
 */
public abstract class Action implements ToXContentObject {

    /**
     * Performs this action using the given client.
     *
     * @param client the client used to issue the cluster request(s) for this action
     */
    protected abstract void execute(Client client);

}
|
|
@ -0,0 +1,53 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.indexlifecycle;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;

import java.io.IOException;

/**
 * A lifecycle {@link Action} that deletes the managed index when executed.
 * The delete request is fired asynchronously; its outcome is only logged.
 */
public class DeleteAction extends Action {
    private static final Logger logger = ESLoggerFactory.getLogger(DeleteAction.class);

    public static final ParseField INDEX_FIELD = new ParseField("index");

    // The index this action will delete when triggered.
    private final Index index;

    /**
     * @param index the index to delete when this action executes
     */
    public DeleteAction(Index index) {
        this.index = index;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(INDEX_FIELD.getPreferredName(), index.getName());
        builder.endObject();
        return builder;
    }

    @Override
    protected void execute(Client client) {
        client.admin().indices().prepareDelete(index.getName()).execute(new ActionListener<DeleteIndexResponse>() {
            @Override
            public void onResponse(DeleteIndexResponse deleteIndexResponse) {
                // Fixed: the successful response was previously logged at ERROR level
                // with no message ("logger.error(deleteIndexResponse)").
                logger.info("deleted index [{}], acknowledged [{}]", index.getName(), deleteIndexResponse.isAcknowledged());
            }

            @Override
            public void onFailure(Exception e) {
                // Fixed: log with a message so the failing index and the full
                // stack trace are preserved (was "logger.error(e)").
                logger.error("failed to delete index [" + index.getName() + "]", e);
            }
        });
    }

}
|
|
@ -29,7 +29,6 @@ import org.elasticsearch.rest.RestController;
|
|||
import org.elasticsearch.rest.RestHandler;
|
||||
import org.elasticsearch.threadpool.ExecutorBuilder;
|
||||
import org.elasticsearch.threadpool.FixedExecutorBuilder;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
|
@ -55,13 +54,10 @@ public class IndexLifecycle implements ActionPlugin {
|
|||
private boolean tribeNodeClient;
|
||||
|
||||
public static final Setting LIFECYCLE_TIMESERIES_SETTING = Setting.groupSetting("index.lifecycle.timeseries.", (settings) -> {
|
||||
ESLoggerFactory.getLogger("INDEX-LIFECYCLE-PLUGIN").error("validating setting internally");
|
||||
ESLoggerFactory.getLogger("INDEX-LIFECYCLE-PLUGIN").error("validating setting internally: " + settings);
|
||||
if (settings.size() == 0) {
|
||||
return;
|
||||
}
|
||||
if (settings.size() != 2 && (settings.get("new") == null || settings.get("delete") == null)) {
|
||||
throw new IllegalArgumentException("you have invalid lifecycle settings, cmon!");
|
||||
}
|
||||
}, Setting.Property.Dynamic, Setting.Property.IndexScope);
|
||||
|
||||
public IndexLifecycle(Settings settings) {
|
||||
|
@ -85,8 +81,8 @@ public class IndexLifecycle implements ActionPlugin {
|
|||
}
|
||||
|
||||
public void onIndexModule(IndexModule indexModule) {
|
||||
ESLoggerFactory.getLogger("INDEX-LIFECYCLE-PLUGIN").error("onIndexModule");
|
||||
Index index = indexModule.getIndex();
|
||||
ESLoggerFactory.getLogger("INDEX-LIFECYCLE-PLUGIN").error("onIndexModule: " + index.getName());
|
||||
long creationDate = settings.getAsLong("index.creation_date", -1L);
|
||||
indexModule.addSettingsUpdateConsumer(LIFECYCLE_TIMESERIES_SETTING,
|
||||
(Settings s) -> indexLifecycleInitialisationService.get().setLifecycleSettings(index, creationDate, s));
|
||||
|
|
|
@ -6,15 +6,11 @@
|
|||
package org.elasticsearch.xpack.indexlifecycle;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
|
||||
import org.elasticsearch.cluster.LocalNodeMasterListener;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@ -24,7 +20,6 @@ import org.elasticsearch.xpack.security.InternalClient;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.xpack.indexlifecycle.IndexLifecycle.LIFECYCLE_TIMESERIES_SETTING;
|
||||
|
||||
|
@ -40,7 +35,6 @@ public class IndexLifecycleInitialisationService extends AbstractLifecycleCompon
|
|||
this.client = client;
|
||||
this.clusterService = clusterService;
|
||||
this.scheduler = new SchedulerEngine(clock);
|
||||
this.scheduler.register(this);
|
||||
clusterService.addLocalNodeMasterListener(this);
|
||||
}
|
||||
|
||||
|
@ -51,30 +45,21 @@ public class IndexLifecycleInitialisationService extends AbstractLifecycleCompon
|
|||
*/
|
||||
public synchronized void setLifecycleSettings(Index index, long creationDate, Settings settings) {
|
||||
if (isMaster == true) {
|
||||
registerIndexSchedule(index, creationDate, settings);
|
||||
IndexLifecycleSettings lifecycleSettings = new IndexLifecycleSettings(index, creationDate, settings, client);
|
||||
registerIndexSchedule(lifecycleSettings);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This does the heavy lifting of adding an index's lifecycle policy to the scheduler.
|
||||
* @param index The index to schedule a policy for
|
||||
* @param settings The `index.lifecycle.timeseries` settings object
|
||||
* @param lifecycleSettings The index lifecycle settings object
|
||||
*/
|
||||
private void registerIndexSchedule(Index index, long creationDate, Settings settings) {
|
||||
private void registerIndexSchedule(IndexLifecycleSettings lifecycleSettings) {
|
||||
// need to check that this isn't re-kicking an existing policy... diffs, etc.
|
||||
// this is where the genesis of index lifecycle management occurs... kick off the scheduling... all is valid!
|
||||
TimeValue deleteAfter = settings.getAsTime("delete.after", TimeValue.MINUS_ONE);
|
||||
SchedulerEngine.Schedule schedule = (startTime, now) -> {
|
||||
if (startTime == now) {
|
||||
return creationDate + deleteAfter.getMillis();
|
||||
} else {
|
||||
return -1; // do not schedule another delete after already deleted
|
||||
}
|
||||
};
|
||||
|
||||
// TODO: scheduler needs to know which index's settings are being updated...
|
||||
scheduler.add(new SchedulerEngine.Job(index.getName(), schedule));
|
||||
ESLoggerFactory.getLogger("INDEX-LIFECYCLE-PLUGIN")
|
||||
.error("kicked off lifecycle job to be triggered in " + deleteAfter.getSeconds() + " seconds");
|
||||
lifecycleSettings.schedulePhases(scheduler);
|
||||
|
||||
}
|
||||
|
||||
|
@ -91,27 +76,18 @@ public class IndexLifecycleInitialisationService extends AbstractLifecycleCompon
|
|||
|
||||
@Override
|
||||
public void triggered(SchedulerEngine.Event event) {
|
||||
client.admin().indices().prepareDelete(event.getJobName()).execute(new ActionListener<DeleteIndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteIndexResponse deleteIndexResponse) {
|
||||
logger.error(deleteIndexResponse);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
logger.error(e);
|
||||
}
|
||||
});
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMaster() {
|
||||
isMaster = true;
|
||||
clusterService.state().getMetaData().getIndices().valuesIt()
|
||||
.forEachRemaining((idxMeta) -> {
|
||||
if (idxMeta.getSettings().getByPrefix(LIFECYCLE_TIMESERIES_SETTING.getKey()).size() > 0) {
|
||||
registerIndexSchedule(idxMeta.getIndex(), idxMeta.getCreationDate(),
|
||||
idxMeta.getSettings().getByPrefix(LIFECYCLE_TIMESERIES_SETTING.getKey()));
|
||||
.forEachRemaining((idxMeta) -> {
|
||||
if (idxMeta.getSettings().getByPrefix(LIFECYCLE_TIMESERIES_SETTING.getKey()).size() > 0) {
|
||||
IndexLifecycleSettings lifecycleSettings = new IndexLifecycleSettings(idxMeta.getIndex(), idxMeta.getCreationDate(),
|
||||
idxMeta.getSettings().getByPrefix(LIFECYCLE_TIMESERIES_SETTING.getKey()), client);
|
||||
registerIndexSchedule(lifecycleSettings);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.indexlifecycle;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.xpack.scheduler.SchedulerEngine;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Holds the parsed lifecycle configuration for a single index: one
 * {@link Phase} per settings group found under the index's
 * {@code index.lifecycle.timeseries.*} settings, plus the logic to
 * register those phases with the scheduler.
 */
public class IndexLifecycleSettings implements ToXContentObject {

    // Cached once instead of re-fetching the logger on every scheduled phase.
    private static final Logger logger = ESLoggerFactory.getLogger(IndexLifecycleSettings.class);

    public static final ParseField PHASES_FIELD = new ParseField("phases");

    private final List<Phase> phases;

    /**
     * Builds one {@link Phase} per settings group (e.g. {@code new.*}, {@code delete.*}).
     *
     * @param index             the index this lifecycle configuration applies to
     * @param indexCreationDate creation timestamp (epoch millis) used as the schedule origin
     * @param settings          the {@code index.lifecycle.timeseries} settings group
     * @param client            client handed to each phase's actions
     */
    public IndexLifecycleSettings(Index index, long indexCreationDate, Settings settings, Client client) {
        phases = new ArrayList<>();
        for (Map.Entry<String, Settings> e : settings.getAsGroups().entrySet()) {
            Phase phase = new Phase(e.getKey(), index, indexCreationDate, e.getValue(), client);
            phases.add(phase);
        }
    }

    public IndexLifecycleSettings(List<Phase> phases) {
        this.phases = phases;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.array(PHASES_FIELD.getPreferredName(), phases);
        builder.endObject();
        return builder;
    }

    /**
     * Registers every phase as both a listener and a job on the scheduler.
     */
    public void schedulePhases(SchedulerEngine scheduler) {
        for (Phase phase : phases) {
            scheduler.register(phase);
            scheduler.add(phase);
            // Fixed: getAfter() is a TimeValue which already prints its own unit
            // (e.g. "1m"), so the old message appended a spurious " seconds";
            // also log at INFO — scheduling a job is not an error condition.
            logger.info("kicked off lifecycle job to be triggered in {}", phase.getAfter());
        }
    }

    // NOCOMMIT: ad-hoc debug entry point left over from development;
    // remove (or move into a unit test) before merging.
    public static void main(String[] args) {
        Settings.Builder settingsBuilder = Settings.builder();
        settingsBuilder.put("new.after", "1m");
        settingsBuilder.put("delete.after", "3s");
        settingsBuilder.put("delete.actions.delete.what", "me");
        Settings settings = settingsBuilder.build();
        System.out.println(settings);

        long currentTimeMillis = System.currentTimeMillis();
        System.out.println(currentTimeMillis);

        IndexLifecycleSettings lifecycleSettings = new IndexLifecycleSettings(new Index("test_index", "1234567890"), currentTimeMillis,
                settings, null);
        System.out.println(Strings.toString(lifecycleSettings));
    }

}
|
|
@ -0,0 +1,109 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.indexlifecycle;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
import org.elasticsearch.xpack.scheduler.SchedulerEngine.Event;
import org.elasticsearch.xpack.scheduler.SchedulerEngine.Schedule;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * A named stage of an index's lifecycle (e.g. {@code new}, {@code delete}).
 * A phase is a one-shot scheduler job: it fires once, {@code after} the
 * index's creation date, and then runs its configured {@link Action}s.
 */
public class Phase extends SchedulerEngine.Job implements ToXContentObject, SchedulerEngine.Listener {

    public static final ParseField NAME_FIELD = new ParseField("name");
    public static final ParseField ID_FIELD = new ParseField("id");
    public static final ParseField ACTIONS_FIELD = new ParseField("actions");
    public static final ParseField AFTER_FIELD = new ParseField("after");
    // Delay after index creation before this phase triggers; defaults to 60s.
    public static final Setting<TimeValue> AFTER_SETTING = Setting.positiveTimeSetting(AFTER_FIELD.getPreferredName(),
            TimeValue.timeValueSeconds(60), Property.IndexScope, Property.Dynamic);
    public static final Setting<Settings> ACTIONS_SETTING = Setting.groupSetting(ACTIONS_FIELD.getPreferredName() + ".", (settings) -> {
        if (settings.size() == 0) {
            return;
        }
        // NOCOMMIT add validation here
    }, Setting.Property.Dynamic, Setting.Property.IndexScope);

    private String name;
    private List<Action> actions;
    private Client client;
    private TimeValue after;

    /**
     * Builds a phase from its settings group.
     *
     * @param name         the phase name (settings group key, e.g. "delete")
     * @param index        the index this phase manages
     * @param creationDate the index creation date (epoch millis), origin of the schedule
     * @param settings     this phase's settings (contains "after" and "actions.*")
     * @param client       client passed to each action on execution
     */
    public Phase(String name, Index index, long creationDate, Settings settings, Client client) {
        // Job id is "<index>-<phase>" so each index/phase pair is a distinct job.
        super(index.getName() + "-" + name, getSchedule(creationDate, settings));
        this.name = name;
        this.client = client;
        this.after = AFTER_SETTING.get(settings);
        this.actions = new ArrayList<>();
        Settings actionsSettings = ACTIONS_SETTING.get(settings);
        for (Map.Entry<String, Settings> e : actionsSettings.getAsGroups().entrySet()) {
            // Only the "delete" action is recognized so far; unknown keys are ignored.
            if (e.getKey().equals("delete")) {
                Action action = new DeleteAction(index);
                actions.add(action);
            }
        }
    }

    /** @return the delay after index creation at which this phase triggers */
    public TimeValue getAfter() {
        return after;
    }

    /**
     * One-shot schedule: fires once at {@code creationDate + after}, then
     * returns -1 so the scheduler never reschedules it.
     */
    private static Schedule getSchedule(long creationDate, Settings settings) {
        // Fixed: removed leftover debug "System.out.println(settings)".
        TimeValue after = AFTER_SETTING.get(settings);
        SchedulerEngine.Schedule schedule = (startTime, now) -> {
            if (startTime == now) {
                return creationDate + after.getMillis();
            } else {
                return -1; // do not schedule another run after the phase has fired
            }
        };
        return schedule;
    }

    // NOTE(review): this constructor never initialises 'after', so toXContent
    // would emit a null "after" field for phases built this way — confirm intent.
    public Phase(String name, List<Action> actions, Schedule schedule, Client client) {
        super(name, schedule);
        this.name = name;
        this.actions = actions;
        this.client = client;
    }

    /** Runs every configured action against the cluster, in order. */
    protected void performActions() {
        for (Action action : actions) {
            action.execute(client);
        }
    }

    @Override
    public void triggered(Event event) {
        // The scheduler broadcasts to all listeners; only react to our own job.
        if (event.getJobName().equals(getId())) {
            performActions();
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(NAME_FIELD.getPreferredName(), name);
        builder.field(ID_FIELD.getPreferredName(), name);
        builder.field(AFTER_FIELD.getPreferredName(), after);
        builder.array(ACTIONS_FIELD.getPreferredName(), actions);
        builder.endObject();
        return builder;
    }

}
|
|
@ -5,20 +5,8 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.indexlifecycle;
|
||||
|
||||
import static org.elasticsearch.client.Requests.clusterHealthRequest;
|
||||
import static org.elasticsearch.client.Requests.createIndexRequest;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.CoreMatchers.not;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.core.IsNull.nullValue;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
|
@ -28,6 +16,7 @@ import org.elasticsearch.index.IndexNotFoundException;
|
|||
import org.elasticsearch.index.reindex.ReindexPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.Scope;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
|
@ -36,6 +25,16 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.elasticsearch.client.Requests.clusterHealthRequest;
|
||||
import static org.elasticsearch.client.Requests.createIndexRequest;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.CoreMatchers.not;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.core.IsNull.nullValue;
|
||||
|
||||
@ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0)
|
||||
public class IndexLifecycleInitialisationIT extends ESIntegTestCase {
|
||||
@Override
|
||||
|
@ -77,8 +76,9 @@ public class IndexLifecycleInitialisationIT extends ESIntegTestCase {
|
|||
.put(indexSettings())
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.put("index.lifecycle.timeseries.new", "baz")
|
||||
.put("index.lifecycle.timeseries.new.after", "1s")
|
||||
.put("index.lifecycle.timeseries.delete.after", "3s")
|
||||
.put("index.lifecycle.timeseries.delete.actions.delete.what", "me")
|
||||
.build();
|
||||
|
||||
// start one server
|
||||
|
|
Loading…
Reference in New Issue