Mirror of https://github.com/apache/druid.git (synced 2025-02-17 07:25:02 +00:00)
commit 377151beda
parent 3cc1b2e690

    better abstraction for metadatastorage
MetadataStorageActionHandler.java:

@@ -19,48 +19,51 @@

 package io.druid.indexing.overlord;

+import com.google.common.base.Optional;
+import com.metamx.common.Pair;
+import org.joda.time.DateTime;
+
 import java.util.List;
 import java.util.Map;

-public interface MetadataStorageActionHandler
+public interface MetadataStorageActionHandler<TaskType, TaskStatusType, TaskActionType, TaskLockType>
 {
   /* Insert stuff on the table */
   public void insert(
-      String tableName,
       String id,
-      String createdDate,
+      DateTime createdDate,
       String dataSource,
-      byte[] payload,
+      TaskType task,
-      int active,
+      boolean active,
-      byte[] statusPayload
+      TaskStatusType status
-  ) throws Exception;
+  ) throws TaskExistsException;

   /* Insert stuff. @returns 1 if status of the task with the given id has been updated successfully */
-  public int setStatus(String tableName, String Id, int active, byte[] statusPayload);
+  public boolean setStatus(String taskId, boolean active, TaskStatusType statusPayload);

   /* Retrieve a task with the given ID */
-  public List<Map<String, Object>> getTask(String tableName, String Id);
+  public Optional<TaskType> getTask(String taskId);

   /* Retrieve a task status with the given ID */
-  public List<Map<String, Object>> getTaskStatus(String tableName, String Id);
+  public Optional<TaskStatusType> getTaskStatus(String taskId);

   /* Retrieve active tasks */
-  public List<Map<String, Object>> getActiveTasks(String tableName);
+  public List<Pair<TaskType, TaskStatusType>> getActiveTasksWithStatus();

   /* Retrieve task statuses that have been created sooner than the given time */
-  public List<Map<String, Object>> getRecentlyFinishedTaskStatuses(String tableName, String recent);
+  public List<TaskStatusType> getRecentlyFinishedTaskStatuses(DateTime start);

   /* Add lock to the task with given ID */
-  public int addLock(String tableName, String Id, byte[] lock);
+  public int addLock(String taskId, TaskLockType lock);

   /* Remove taskLock with given ID */
-  public int removeLock(String tableName, long lockId);
+  public int removeLock(long lockId);

-  public int addAuditLog(String tableName, String Id, byte[] taskAction);
+  public int addAuditLog(String taskId, TaskActionType taskAction);

   /* Get logs for task with given ID */
-  public List<Map<String, Object>> getTaskLogs(String tableName, String Id);
+  public List<TaskActionType> getTaskLogs(String taskId);

   /* Get locks for task with given ID */
-  public List<Map<String, Object>> getTaskLocks(String tableName, String Id);
+  public Map<Long, TaskLockType> getTaskLocks(String taskId);
 }
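To make the reworked contract concrete, here is a brief usage sketch. It is illustrative only and not part of this commit: the class name is hypothetical, the handler instance is assumed to be injected elsewhere, and the type parameters are the same ones MetadataTaskStorage binds later in this diff.

// Illustrative sketch, not part of this commit.
package io.druid.indexing.overlord; // placed here only so the interface and TaskExistsException resolve

import com.google.common.base.Optional;
import com.metamx.common.Pair;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.actions.TaskAction;
import io.druid.indexing.common.task.Task;
import org.joda.time.DateTime;

public class HandlerUsageSketch
{
  private final MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock> handler;

  public HandlerUsageSketch(MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock> handler)
  {
    this.handler = handler;
  }

  public void saveAndInspect(Task task, TaskStatus status) throws TaskExistsException
  {
    // insert() now takes domain objects and a DateTime instead of raw byte[] payloads,
    // and signals duplicates with TaskExistsException instead of a generic Exception.
    handler.insert(task.getId(), new DateTime(), task.getDataSource(), task, status.isRunnable(), status);

    // Lookups return typed Optionals instead of List<Map<String, Object>> rows.
    Optional<Task> storedTask = handler.getTask(task.getId());
    Optional<TaskStatus> storedStatus = handler.getTaskStatus(task.getId());

    // Active tasks come back already paired with their statuses.
    for (Pair<Task, TaskStatus> entry : handler.getActiveTasksWithStatus()) {
      if (entry.rhs.isRunnable()) {
        // entry.lhs is the corresponding Task
      }
    }
  }
}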
@@ -56,8 +56,6 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.io.RandomAccessFile;
-import java.nio.channels.Channels;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
MetadataTaskStorage.java:

@@ -19,57 +19,49 @@

 package io.druid.indexing.overlord;

-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.inject.Inject;
+import com.metamx.common.Pair;
 import com.metamx.common.lifecycle.LifecycleStart;
 import com.metamx.common.lifecycle.LifecycleStop;
 import com.metamx.emitter.EmittingLogger;
-import io.druid.db.MetadataStorageConnector;
-import io.druid.db.MetadataStorageTablesConfig;
-import io.druid.indexing.common.TaskLock;
 import io.druid.indexing.common.TaskStatus;
+import io.druid.db.MetadataStorageConnector;
+import io.druid.indexing.common.TaskLock;
 import io.druid.indexing.common.actions.TaskAction;
 import io.druid.indexing.common.config.TaskStorageConfig;
 import io.druid.indexing.common.task.Task;
 import org.joda.time.DateTime;
-import org.skife.jdbi.v2.exceptions.CallbackFailedException;
-import org.skife.jdbi.v2.exceptions.StatementException;

+import javax.annotation.Nullable;
 import java.util.List;
 import java.util.Map;

 public class MetadataTaskStorage implements TaskStorage
 {
-  private final ObjectMapper jsonMapper;
   private final MetadataStorageConnector metadataStorageConnector;
-  private final MetadataStorageTablesConfig dbTables;
   private final TaskStorageConfig config;
-  private final MetadataStorageActionHandler handler;
+  private final MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock> handler;

   private static final EmittingLogger log = new EmittingLogger(MetadataTaskStorage.class);

   @Inject
   public MetadataTaskStorage(
-      final ObjectMapper jsonMapper,
       final MetadataStorageConnector metadataStorageConnector,
-      final MetadataStorageTablesConfig dbTables,
       final TaskStorageConfig config,
       final MetadataStorageActionHandler handler
   )
   {
-    this.jsonMapper = jsonMapper;
     this.metadataStorageConnector = metadataStorageConnector;
-    this.dbTables = dbTables;
     this.config = config;
-    this.handler = handler;
+    // this is a little janky but haven't figured out how to get Guice to do this yet.
+    this.handler = (MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock>) handler;
   }

   @LifecycleStart
@@ -100,23 +92,19 @@ public class MetadataTaskStorage implements TaskStorage

     try {
       handler.insert(
-          dbTables.getTasksTable(),
           task.getId(),
-          new DateTime().toString(),
+          new DateTime(),
           task.getDataSource(),
-          jsonMapper.writeValueAsBytes(task),
+          task,
-          status.isRunnable() ? 1 : 0,
+          status.isRunnable(),
-          jsonMapper.writeValueAsBytes(status)
+          status
       );
     }
     catch (Exception e) {
-      final boolean isStatementException = e instanceof StatementException ||
-                                           (e instanceof CallbackFailedException
-                                            && e.getCause() instanceof StatementException);
-      if (isStatementException && getTask(task.getId()).isPresent()) {
-        throw new TaskExistsException(task.getId(), e);
+      if(e instanceof TaskExistsException) {
+        throw (TaskExistsException) e;
       } else {
-        throw Throwables.propagate(e);
+        Throwables.propagate(e);
       }
     }
   }
@@ -128,103 +116,77 @@ public class MetadataTaskStorage implements TaskStorage

     log.info("Updating task %s to status: %s", status.getId(), status);

-    try {
-      int updated = handler.setStatus(
-          dbTables.getTasksTable(),
-          status.getId(),
-          status.isRunnable() ? 1 : 0,
-          jsonMapper.writeValueAsBytes(status)
-      );
-      if (updated != 1) {
-        throw new IllegalStateException(String.format("Active task not found: %s", status.getId()));
-      }
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
+    final boolean set = handler.setStatus(
+        status.getId(),
+        status.isRunnable(),
+        status
+    );
+    if (!set) {
+      throw new IllegalStateException(String.format("Active task not found: %s", status.getId()));
     }
   }

   @Override
-  public Optional<Task> getTask(final String taskid)
+  public Optional<Task> getTask(final String taskId)
   {
-    try {
-      final List<Map<String, Object>> dbTasks = handler.getTask(dbTables.getTasksTable(), taskid);
-      if (dbTasks.size() == 0) {
-        return Optional.absent();
-      } else {
-        final Map<String, Object> dbStatus = Iterables.getOnlyElement(dbTasks);
-        return Optional.of(jsonMapper.readValue((byte[]) dbStatus.get("payload"), Task.class));
-      }
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
-    }
+    return handler.getTask(taskId);
   }

   @Override
-  public Optional<TaskStatus> getStatus(final String taskid)
+  public Optional<TaskStatus> getStatus(final String taskId)
   {
-    try {
-      final List<Map<String, Object>> dbStatuses = handler.getTaskStatus(dbTables.getTasksTable(), taskid);
-      if (dbStatuses.size() == 0) {
-        return Optional.absent();
-      } else {
-        final Map<String, Object> dbStatus = Iterables.getOnlyElement(dbStatuses);
-        return Optional.of(jsonMapper.readValue((byte[]) dbStatus.get("status_payload"), TaskStatus.class));
-      }
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
-    }
+    return handler.getTaskStatus(taskId);
   }

   @Override
   public List<Task> getActiveTasks()
   {
-    final List<Map<String, Object>> dbTasks = handler.getActiveTasks(dbTables.getTasksTable());
-
-    final ImmutableList.Builder<Task> tasks = ImmutableList.builder();
-    for (final Map<String, Object> row : dbTasks) {
-      final String id = row.get("id").toString();
-
-      try {
-        final Task task = jsonMapper.readValue((byte[]) row.get("payload"), Task.class);
-        final TaskStatus status = jsonMapper.readValue((byte[]) row.get("status_payload"), TaskStatus.class);
-
-        if (status.isRunnable()) {
-          tasks.add(task);
-        }
-      }
-      catch (Exception e) {
-        log.makeAlert(e, "Failed to parse task payload").addData("task", id).emit();
-      }
-    }
-
-    return tasks.build();
+    return ImmutableList.copyOf(
+        Iterables.transform(
+            Iterables.filter(
+                handler.getActiveTasksWithStatus(),
+                new Predicate<Pair<Task, TaskStatus>>()
+                {
+                  @Override
+                  public boolean apply(
+                      @Nullable Pair<Task, TaskStatus> input
+                  )
+                  {
+                    return input.rhs.isRunnable();
+                  }
+                }
+            ),
+            new Function<Pair<Task, TaskStatus>, Task>()
+            {
+              @Nullable
+              @Override
+              public Task apply(@Nullable Pair<Task, TaskStatus> input)
+              {
+                return input.lhs;
+              }
+            }
+        )
+    );
   }

   @Override
   public List<TaskStatus> getRecentlyFinishedTaskStatuses()
   {
-    final DateTime recent = new DateTime().minus(config.getRecentlyFinishedThreshold());
-
-    final List<Map<String, Object>> dbTasks = handler.getRecentlyFinishedTaskStatuses(dbTables.getTasksTable(), recent.toString());
-    final ImmutableList.Builder<TaskStatus> statuses = ImmutableList.builder();
-    for (final Map<String, Object> row : dbTasks) {
-      final String id = row.get("id").toString();
-
-      try {
-        final TaskStatus status = jsonMapper.readValue((byte[]) row.get("status_payload"), TaskStatus.class);
-        if (status.isComplete()) {
-          statuses.add(status);
-        }
-      }
-      catch (Exception e) {
-        log.makeAlert(e, "Failed to parse status payload").addData("task", id).emit();
-      }
-    }
-
-    return statuses.build();
+    final DateTime start = new DateTime().minus(config.getRecentlyFinishedThreshold());
+
+    return ImmutableList.copyOf(
+        Iterables.filter(
+            handler.getRecentlyFinishedTaskStatuses(start),
+            new Predicate<TaskStatus>()
+            {
+              @Override
+              public boolean apply(TaskStatus status)
+              {
+                return status.isComplete();
+              }
+            }
+        )
+    );
   }

   @Override
@@ -240,14 +202,7 @@ public class MetadataTaskStorage implements TaskStorage
         taskid
     );

-    try {
-      handler.addLock(dbTables.getTaskLockTable(), taskid, jsonMapper.writeValueAsBytes(taskLock));
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
-    }
-
-
+    handler.addLock(taskid, taskLock);
   }

   @Override
@@ -264,7 +219,7 @@ public class MetadataTaskStorage implements TaskStorage

       if (taskLock.equals(taskLockToRemove)) {
         log.info("Deleting TaskLock with id[%d]: %s", id, taskLock);
-        handler.removeLock(dbTables.getTaskLockTable(), id);
+        handler.removeLock(id);
       }
     }
   }
@@ -293,52 +248,17 @@ public class MetadataTaskStorage implements TaskStorage

     log.info("Logging action for task[%s]: %s", task.getId(), taskAction);

-    try {
-      handler.addAuditLog(dbTables.getTaskLogTable(), task.getId(), jsonMapper.writeValueAsBytes(taskAction));
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
-    }
+    handler.addAuditLog(task.getId(), taskAction);
   }

   @Override
-  public List<TaskAction> getAuditLogs(final String taskid)
+  public List<TaskAction> getAuditLogs(final String taskId)
   {
-    final List<Map<String, Object>> dbTaskLogs = handler.getTaskLogs(dbTables.getTaskLogTable(), taskid);
-    final List<TaskAction> retList = Lists.newArrayList();
-    for (final Map<String, Object> dbTaskLog : dbTaskLogs) {
-      try {
-        retList.add(jsonMapper.readValue((byte[]) dbTaskLog.get("log_payload"), TaskAction.class));
-      }
-      catch (Exception e) {
-        log.makeAlert(e, "Failed to deserialize TaskLog")
-           .addData("task", taskid)
-           .addData("logPayload", dbTaskLog)
-           .emit();
-      }
-    }
-    return retList;
+    return handler.getTaskLogs(taskId);
   }

   private Map<Long, TaskLock> getLocksWithIds(final String taskid)
   {
-    final List<Map<String, Object>> dbTaskLocks = handler.getTaskLocks(dbTables.getTaskLockTable(), taskid);
-
-    final Map<Long, TaskLock> retMap = Maps.newHashMap();
-    for (final Map<String, Object> row : dbTaskLocks) {
-      try {
-        retMap.put(
-            (Long) row.get("id"),
-            jsonMapper.readValue((byte[]) row.get("lock_payload"), TaskLock.class)
-        );
-      }
-      catch (Exception e) {
-        log.makeAlert(e, "Failed to deserialize TaskLock")
-           .addData("task", taskid)
-           .addData("lockPayload", row)
-           .emit();
-      }
-    }
-    return retMap;
+    return handler.getTaskLocks(taskid);
   }
 }
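A note on the constructor above: the handler is injected with raw type parameters, so the storage keeps an unchecked cast and the "janky" comment. As a hedged aside, and not what this commit does, Guice's TypeLiteral API could bind the fully parameterized handler directly, which would remove the cast; a minimal sketch, with a hypothetical module name:

// Sketch only, not part of this commit.
package io.druid.indexing.overlord; // hypothetical placement, for illustration

import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import io.druid.db.SQLMetadataStorageActionHandler;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.actions.TaskAction;
import io.druid.indexing.common.task.Task;

public class TypedHandlerModule implements Module
{
  @Override
  public void configure(Binder binder)
  {
    // Binding the parameterized type lets Guice inject
    // MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock> without a raw cast.
    binder.bind(new TypeLiteral<MetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock>>() {})
          .to(new TypeLiteral<SQLMetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock>>() {});
  }
}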
RemoteTaskRunnerTest.java:

@@ -29,7 +29,6 @@ import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.metamx.emitter.EmittingLogger;
 import com.metamx.emitter.service.ServiceEmitter;
-import io.druid.common.guava.DSuppliers;
 import io.druid.curator.PotentiallyGzippedCompressionProvider;
 import io.druid.curator.cache.SimplePathChildrenCacheFactory;
 import io.druid.indexing.common.IndexingServiceCondition;
@@ -41,7 +40,6 @@ import io.druid.indexing.common.task.Task;
 import io.druid.indexing.common.task.TaskResource;
 import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig;
 import io.druid.indexing.overlord.setup.FillCapacityWorkerSelectStrategy;
-import io.druid.indexing.overlord.setup.WorkerSetupData;
 import io.druid.indexing.worker.TaskAnnouncement;
 import io.druid.indexing.worker.Worker;
 import io.druid.jackson.DefaultObjectMapper;
@@ -59,7 +57,6 @@ import org.junit.Test;

 import java.util.Set;
 import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicReference;

 public class RemoteTaskRunnerTest
 {
SQLMetadataStorageActionHandler.java:

@@ -19,17 +19,37 @@

 package io.druid.db;

+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.api.client.util.Maps;
+import com.google.common.base.Charsets;
+import com.google.common.base.Optional;
 import com.google.common.base.Predicate;
 import com.google.common.base.Throwables;
+import com.google.common.collect.Lists;
 import com.google.inject.Inject;
+import com.google.inject.name.Named;
+import com.metamx.common.Pair;
 import com.metamx.common.RetryUtils;
+import com.metamx.emitter.EmittingLogger;
 import io.druid.indexing.overlord.MetadataStorageActionHandler;
+import io.druid.indexing.overlord.TaskExistsException;
+import org.joda.time.DateTime;
+import org.skife.jdbi.v2.FoldController;
+import org.skife.jdbi.v2.Folder3;
 import org.skife.jdbi.v2.Handle;
 import org.skife.jdbi.v2.IDBI;
+import org.skife.jdbi.v2.StatementContext;
+import org.skife.jdbi.v2.exceptions.CallbackFailedException;
 import org.skife.jdbi.v2.exceptions.DBIException;
+import org.skife.jdbi.v2.exceptions.StatementException;
 import org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException;
 import org.skife.jdbi.v2.tweak.HandleCallback;
+import org.skife.jdbi.v2.tweak.ResultSetMapper;
+import org.skife.jdbi.v2.util.ByteArrayMapper;

+import java.io.IOException;
+import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.SQLRecoverableException;
 import java.sql.SQLTransientException;
@@ -37,98 +57,148 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;

-public class SQLMetadataStorageActionHandler implements MetadataStorageActionHandler
+public class SQLMetadataStorageActionHandler<TaskType, TaskStatusType, TaskActionType, TaskLockType>
+    implements MetadataStorageActionHandler<TaskType, TaskStatusType, TaskActionType, TaskLockType>
 {
+  private static final EmittingLogger log = new EmittingLogger(SQLMetadataStorageActionHandler.class);
+
   private final IDBI dbi;
   private final SQLMetadataConnector connector;
+  private final MetadataStorageTablesConfig config;
+  private final ObjectMapper jsonMapper;
+  private final TypeReference taskType;
+  private final TypeReference taskStatusType;
+  private final TypeReference taskActionType;
+  private final TypeReference taskLockType;

   @Inject
   public SQLMetadataStorageActionHandler(
       final IDBI dbi,
-      final SQLMetadataConnector connector
-  )
+      final SQLMetadataConnector connector,
+      final MetadataStorageTablesConfig config,
+      final ObjectMapper jsonMapper,
+      // we all love type erasure
+      final @Named("taskType") TypeReference taskType,
+      final @Named("taskStatusType") TypeReference taskStatusType,
+      final @Named("taskActionType") TypeReference taskActionType,
+      final @Named("taskLockType") TypeReference taskLockType
+  )
   {
     this.dbi = dbi;
     this.connector = connector;
+    this.config = config;
+    this.jsonMapper = jsonMapper;
+    this.taskType = taskType;
+    this.taskStatusType = taskStatusType;
+    this.taskActionType = taskActionType;
+    this.taskLockType = taskLockType;
   }

-  /* Insert stuff. @returns number of entries inserted on success */
+  /**
+   * Insert stuff
+   *
+   */
   public void insert(
-      final String tableName,
       final String id,
-      final String createdDate,
+      final DateTime createdDate,
       final String dataSource,
-      final byte[] payload,
+      final TaskType task,
-      final int active,
+      final boolean active,
-      final byte[] statusPayload
+      final TaskStatusType status
-  )
+  ) throws TaskExistsException
   {
-    retryingHandle(
-        new HandleCallback<Void>()
-        {
-          @Override
-          public Void withHandle(Handle handle) throws Exception
-          {
-            handle.createStatement(
-                String.format(
-                    "INSERT INTO %s (id, created_date, datasource, payload, active, status_payload) VALUES (:id, :created_date, :datasource, :payload, :active, :status_payload)",
-                    tableName
-                )
-            )
-                  .bind("id", id)
-                  .bind("created_date", createdDate)
-                  .bind("datasource", dataSource)
-                  .bind("payload", payload)
-                  .bind("active", active)
-                  .bind("status_payload", statusPayload)
-                  .execute();
-            return null;
-          }
-        }
-    );
+    try {
+      retryingHandle(
+          new HandleCallback<Void>()
+          {
+            @Override
+            public Void withHandle(Handle handle) throws Exception
+            {
+              handle.createStatement(
+                  String.format(
+                      "INSERT INTO %s (id, created_date, datasource, payload, active, status_payload) VALUES (:id, :created_date, :datasource, :payload, :active, :status_payload)",
+                      config.getTasksTable()
+                  )
+              )
+                    .bind("id", id)
+                    .bind("created_date", createdDate)
+                    .bind("datasource", dataSource)
+                    .bind("payload", jsonMapper.writeValueAsBytes(task))
+                    .bind("active", active)
+                    .bind("status_payload", jsonMapper.writeValueAsBytes(status))
+                    .execute();
+              return null;
+            }
+          }
+      );
+    } catch(Exception e) {
+      final boolean isStatementException = e instanceof StatementException ||
+                                           (e instanceof CallbackFailedException
+                                            && e.getCause() instanceof StatementException);
+      if (isStatementException && getTask(id).isPresent()) {
+        throw new TaskExistsException(id, e);
+      } else {
+        throw Throwables.propagate(e);
+      }
+    }
   }

-  /* Insert stuff. @returns 1 if status of the task with the given id has been updated successfully */
-  public int setStatus(final String tableName, final String Id, final int active, final byte[] statusPayload)
+  /**
+   * Update task status.
+   *
+   * @return true if status of the task with the given id has been updated successfully
+   */
+  public boolean setStatus(final String taskId, final boolean active, final TaskStatusType status)
   {
     return retryingHandle(
-        new HandleCallback<Integer>()
+        new HandleCallback<Boolean>()
         {
           @Override
-          public Integer withHandle(Handle handle) throws Exception
+          public Boolean withHandle(Handle handle) throws Exception
           {
             return handle.createStatement(
                 String.format(
                     "UPDATE %s SET active = :active, status_payload = :status_payload WHERE id = :id AND active = 1",
-                    tableName
+                    config.getTasksTable()
                 )
             )
-                         .bind("id", Id)
+                         .bind("id", taskId)
                          .bind("active", active)
-                         .bind("status_payload", statusPayload)
-                         .execute();
+                         .bind("status_payload", jsonMapper.writeValueAsBytes(status))
+                         .execute() == 1;
           }
         }
     );
   }

-  /* Retrieve a task with the given ID */
-  public List<Map<String, Object>> getTask(final String tableName, final String Id)
+  /* */
+  /**
+   * Retrieve a task with the given ID
+   *
+   * @param taskId task ID
+   * @return task if it exists
+   */
+  public Optional<TaskType> getTask(final String taskId)
   {
     return retryingHandle(
-        new HandleCallback<List<Map<String, Object>>>()
+        new HandleCallback<Optional<TaskType>>()
         {
           @Override
-          public List<Map<String, Object>> withHandle(Handle handle) throws Exception
+          public Optional<TaskType> withHandle(Handle handle) throws Exception
           {
-            return handle.createQuery(
-                String.format(
-                    "SELECT payload FROM %s WHERE id = :id",
-                    tableName
-                )
-            )
-                         .bind("id", Id)
-                         .list();
+            return Optional.fromNullable(
+                jsonMapper.<TaskType>readValue(
+                    handle.createQuery(
+                        String.format("SELECT payload FROM %s WHERE id = :id", config.getTasksTable())
+                    )
+                          .bind("id", taskId)
+                          .map(ByteArrayMapper.FIRST)
+                          .first(),
+                    taskType
+                )
+            );
           }
         }
     );
@@ -136,42 +206,72 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
   }

   /* Retrieve a task status with the given ID */
-  public List<Map<String, Object>> getTaskStatus(final String tableName, final String Id)
+  public Optional<TaskStatusType> getTaskStatus(final String taskId)
   {
     return retryingHandle(
-        new HandleCallback<List<Map<String, Object>>>()
+        new HandleCallback<Optional<TaskStatusType>>()
         {
           @Override
-          public List<Map<String, Object>> withHandle(Handle handle) throws Exception
+          public Optional<TaskStatusType> withHandle(Handle handle) throws Exception
           {
-            return handle.createQuery(
-                String.format(
-                    "SELECT status_payload FROM %s WHERE id = :id",
-                    tableName
-                )
-            )
-                         .bind("id", Id)
-                         .list();
+            return Optional.fromNullable(
+                jsonMapper.<TaskStatusType>readValue(
+                    handle.createQuery(
+                        String.format("SELECT status_payload FROM %s WHERE id = :id", config.getTasksTable())
+                    )
+                          .bind("id", taskId)
+                          .map(ByteArrayMapper.FIRST)
+                          .first(),
+                    taskStatusType
+                )
+            );
           }
         }
     );
   }

   /* Retrieve active tasks */
-  public List<Map<String, Object>> getActiveTasks(final String tableName)
+  public List<Pair<TaskType, TaskStatusType>> getActiveTasksWithStatus()
   {
     return retryingHandle(
-        new HandleCallback<List<Map<String, Object>>>()
+        new HandleCallback<List<Pair<TaskType, TaskStatusType>>>()
         {
           @Override
-          public List<Map<String, Object>> withHandle(Handle handle) throws Exception
+          public List<Pair<TaskType, TaskStatusType>> withHandle(Handle handle) throws Exception
           {
-            return handle.createQuery(
-                String.format(
-                    "SELECT id, payload, status_payload FROM %s WHERE active = 1 ORDER BY created_date",
-                    tableName
-                )
-            ).list();
+            return handle
+                .createQuery(
+                    String.format(
+                        "SELECT id, payload, status_payload FROM %s WHERE active = TRUE ORDER BY created_date",
+                        config.getTasksTable()
+                    )
+                )
+                .map(
+                    new ResultSetMapper<Pair<TaskType, TaskStatusType>>()
+                    {
+                      @Override
+                      public Pair<TaskType, TaskStatusType> map(int index, ResultSet r, StatementContext ctx)
+                          throws SQLException
+                      {
+                        try {
+                          return Pair.of(
+                              jsonMapper.<TaskType>readValue(
+                                  r.getBytes("payload"),
+                                  taskType
+                              ),
+                              jsonMapper.<TaskStatusType>readValue(
+                                  r.getBytes("status_payload"),
+                                  taskStatusType
+                              )
+                          );
+                        }
+                        catch (IOException e) {
+                          log.makeAlert(e, "Failed to parse task payload").addData("task", r.getString("id")).emit();
+                          throw new SQLException(e);
+                        }
+                      }
+                    }
+                ).list();
           }
         }
     );
||||||
@ -179,27 +279,49 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* Retrieve task statuses that have been created sooner than the given time */
|
/* Retrieve task statuses that have been created sooner than the given time */
|
||||||
public List<Map<String, Object>> getRecentlyFinishedTaskStatuses(final String tableName, final String recent)
|
public List<TaskStatusType> getRecentlyFinishedTaskStatuses(final DateTime start)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<List<Map<String, Object>>>()
|
new HandleCallback<List<TaskStatusType>>()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
public List<Map<String, Object>> withHandle(Handle handle) throws Exception
|
public List<TaskStatusType> withHandle(Handle handle) throws Exception
|
||||||
{
|
{
|
||||||
return handle.createQuery(
|
return handle
|
||||||
String.format(
|
.createQuery(
|
||||||
"SELECT id, status_payload FROM %s WHERE active = 0 AND created_date >= :recent ORDER BY created_date DESC",
|
String.format(
|
||||||
tableName
|
"SELECT id, status_payload FROM %s WHERE active = FALSE AND created_date >= :start ORDER BY created_date DESC",
|
||||||
)
|
config.getTasksTable()
|
||||||
).bind("recent", recent.toString()).list();
|
)
|
||||||
|
).bind("start", start.toString())
|
||||||
|
.map(
|
||||||
|
new ResultSetMapper<TaskStatusType>()
|
||||||
|
{
|
||||||
|
@Override
|
||||||
|
public TaskStatusType map(int index, ResultSet r, StatementContext ctx) throws SQLException
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
return jsonMapper.readValue(
|
||||||
|
r.getBytes("status_payload"),
|
||||||
|
taskStatusType
|
||||||
|
);
|
||||||
|
}
|
||||||
|
catch (IOException e) {
|
||||||
|
log.makeAlert(e, "Failed to parse status payload")
|
||||||
|
.addData("task", r.getString("id"))
|
||||||
|
.emit();
|
||||||
|
throw new SQLException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).list();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Add lock to the task with given ID */
|
/* Add lock to the task with given ID */
|
||||||
public int addLock(final String tableName, final String Id, final byte[] lock)
|
public int addLock(final String taskId, final TaskLockType lock)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<Integer>()
|
new HandleCallback<Integer>()
|
||||||
@ -210,11 +332,11 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
return handle.createStatement(
|
return handle.createStatement(
|
||||||
String.format(
|
String.format(
|
||||||
"INSERT INTO %s (task_id, lock_payload) VALUES (:task_id, :lock_payload)",
|
"INSERT INTO %s (task_id, lock_payload) VALUES (:task_id, :lock_payload)",
|
||||||
tableName
|
config.getTaskLockTable()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.bind("task_id", Id)
|
.bind("task_id", taskId)
|
||||||
.bind("lock_payload", lock)
|
.bind("lock_payload", jsonMapper.writeValueAsBytes(lock))
|
||||||
.execute();
|
.execute();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -222,7 +344,7 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* Remove taskLock with given ID */
|
/* Remove taskLock with given ID */
|
||||||
public int removeLock(final String tableName, final long lockId)
|
public int removeLock(final long lockId)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<Integer>()
|
new HandleCallback<Integer>()
|
||||||
@ -233,7 +355,7 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
return handle.createStatement(
|
return handle.createStatement(
|
||||||
String.format(
|
String.format(
|
||||||
"DELETE FROM %s WHERE id = :id",
|
"DELETE FROM %s WHERE id = :id",
|
||||||
tableName
|
config.getTaskLockTable()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.bind("id", lockId)
|
.bind("id", lockId)
|
||||||
@ -243,7 +365,7 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public int addAuditLog(final String tableName, final String Id, final byte[] taskAction)
|
public int addAuditLog(final String taskId, final TaskActionType taskAction)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<Integer>()
|
new HandleCallback<Integer>()
|
||||||
@ -254,11 +376,11 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
return handle.createStatement(
|
return handle.createStatement(
|
||||||
String.format(
|
String.format(
|
||||||
"INSERT INTO %s (task_id, log_payload) VALUES (:task_id, :log_payload)",
|
"INSERT INTO %s (task_id, log_payload) VALUES (:task_id, :log_payload)",
|
||||||
tableName
|
config.getTaskLogTable()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.bind("task_id", Id)
|
.bind("task_id", taskId)
|
||||||
.bind("log_payload", taskAction)
|
.bind("log_payload", jsonMapper.writeValueAsBytes(taskAction))
|
||||||
.execute();
|
.execute();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -266,44 +388,116 @@ public class SQLMetadataStorageActionHandler implements MetadataStorageActionHan
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* Get logs for task with given ID */
|
/* Get logs for task with given ID */
|
||||||
public List<Map<String, Object>> getTaskLogs(final String tableName, final String Id)
|
public List<TaskActionType> getTaskLogs(final String taskId)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<List<Map<String, Object>>>()
|
new HandleCallback<List<TaskActionType>>()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
public List<Map<String, Object>> withHandle(Handle handle) throws Exception
|
public List<TaskActionType> withHandle(Handle handle) throws Exception
|
||||||
{
|
{
|
||||||
return handle.createQuery(
|
return handle
|
||||||
String.format(
|
.createQuery(
|
||||||
"SELECT log_payload FROM %s WHERE task_id = :task_id",
|
String.format(
|
||||||
tableName
|
"SELECT log_payload FROM %s WHERE task_id = :task_id",
|
||||||
)
|
config.getTaskLogTable()
|
||||||
)
|
)
|
||||||
.bind("task_id", Id)
|
)
|
||||||
.list();
|
.bind("task_id", taskId)
|
||||||
|
.map(ByteArrayMapper.FIRST)
|
||||||
|
.fold(
|
||||||
|
Lists.<TaskActionType>newLinkedList(),
|
||||||
|
new Folder3<List<TaskActionType>, byte[]>()
|
||||||
|
{
|
||||||
|
@Override
|
||||||
|
public List<TaskActionType> fold(
|
||||||
|
List<TaskActionType> list, byte[] bytes, FoldController control, StatementContext ctx
|
||||||
|
) throws SQLException
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
list.add(
|
||||||
|
jsonMapper.<TaskActionType>readValue(
|
||||||
|
bytes, taskActionType
|
||||||
|
)
|
||||||
|
);
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
catch (IOException e) {
|
||||||
|
log.makeAlert(e, "Failed to deserialize TaskLog")
|
||||||
|
.addData("task", taskId)
|
||||||
|
.addData("logPayload", new String(bytes, Charsets.UTF_8))
|
||||||
|
.emit();
|
||||||
|
throw new SQLException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Get locks for task with given ID */
|
/* Get locks for task with given ID */
|
||||||
public List<Map<String, Object>> getTaskLocks(final String tableName, final String Id)
|
public Map<Long, TaskLockType> getTaskLocks(final String taskId)
|
||||||
{
|
{
|
||||||
return retryingHandle(
|
return retryingHandle(
|
||||||
new HandleCallback<List<Map<String, Object>>>()
|
new HandleCallback<Map<Long, TaskLockType>>()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
public List<Map<String, Object>> withHandle(Handle handle) throws Exception
|
public Map<Long, TaskLockType> withHandle(Handle handle) throws Exception
|
||||||
{
|
{
|
||||||
return handle.createQuery(
|
return handle.createQuery(
|
||||||
String.format(
|
String.format(
|
||||||
"SELECT id, lock_payload FROM %s WHERE task_id = :task_id",
|
"SELECT id, lock_payload FROM %s WHERE task_id = :task_id",
|
||||||
tableName
|
config.getTaskLockTable()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.bind("task_id", Id)
|
.bind("task_id", taskId)
|
||||||
.list();
|
.map(
|
||||||
|
new ResultSetMapper<Pair<Long, TaskLockType>>()
|
||||||
|
{
|
||||||
|
@Override
|
||||||
|
public Pair<Long, TaskLockType> map(int index, ResultSet r, StatementContext ctx)
|
||||||
|
throws SQLException
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
return Pair.of(
|
||||||
|
r.getLong("id"),
|
||||||
|
jsonMapper.<TaskLockType>readValue(
|
||||||
|
r.getBytes("lock_payload"),
|
||||||
|
taskLockType
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
catch (IOException e) {
|
||||||
|
log.makeAlert(e, "Failed to deserialize TaskLock")
|
||||||
|
.addData("task", r.getLong("id"))
|
||||||
|
.addData(
|
||||||
|
"lockPayload", new String(r.getBytes("lock_payload"), Charsets.UTF_8)
|
||||||
|
)
|
||||||
|
.emit();
|
||||||
|
throw new SQLException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
.fold(
|
||||||
|
Maps.<Long, TaskLockType>newLinkedHashMap(),
|
||||||
|
new Folder3<Map<Long, TaskLockType>, Pair<Long, TaskLockType>>()
|
||||||
|
{
|
||||||
|
@Override
|
||||||
|
public Map<Long, TaskLockType> fold(
|
||||||
|
Map<Long, TaskLockType> accumulator,
|
||||||
|
Pair<Long, TaskLockType> lock,
|
||||||
|
FoldController control,
|
||||||
|
StatementContext ctx
|
||||||
|
) throws SQLException
|
||||||
|
{
|
||||||
|
accumulator.put(lock.lhs, lock.rhs);
|
||||||
|
return accumulator;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
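For reference, the new constructor can also be satisfied by hand, e.g. in a test. The factory class below is a sketch only (not part of the commit) and assumes the caller already has an IDBI, an SQLMetadataConnector and a MetadataStorageTablesConfig; in production, CliOverlord supplies the four TypeReference arguments through the @Named Guice bindings shown in the next hunk.

// Sketch only, not part of this commit.
package io.druid.db; // hypothetical helper, for illustration

import com.fasterxml.jackson.core.type.TypeReference;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.actions.TaskAction;
import io.druid.indexing.common.task.Task;
import io.druid.jackson.DefaultObjectMapper;
import org.skife.jdbi.v2.IDBI;

public class HandlerFactorySketch
{
  // Builds the generic handler directly, matching the constructor added in this commit.
  public static SQLMetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock> create(
      final IDBI dbi,
      final SQLMetadataConnector connector,
      final MetadataStorageTablesConfig tablesConfig
  )
  {
    return new SQLMetadataStorageActionHandler<Task, TaskStatus, TaskAction, TaskLock>(
        dbi,
        connector,
        tablesConfig,
        new DefaultObjectMapper(),
        new TypeReference<Task>() {},
        new TypeReference<TaskStatus>() {},
        new TypeReference<TaskAction>() {},
        new TypeReference<TaskLock>() {}
    );
  }
}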
|
@ -19,6 +19,7 @@
|
|||||||
|
|
||||||
package io.druid.cli;
|
package io.druid.cli;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.type.TypeReference;
|
||||||
import com.google.common.collect.ImmutableList;
|
import com.google.common.collect.ImmutableList;
|
||||||
import com.google.inject.Binder;
|
import com.google.inject.Binder;
|
||||||
import com.google.inject.Injector;
|
import com.google.inject.Injector;
|
||||||
@ -31,6 +32,7 @@ import com.google.inject.servlet.GuiceFilter;
|
|||||||
import com.google.inject.util.Providers;
|
import com.google.inject.util.Providers;
|
||||||
import com.metamx.common.logger.Logger;
|
import com.metamx.common.logger.Logger;
|
||||||
import io.airlift.command.Command;
|
import io.airlift.command.Command;
|
||||||
|
import io.druid.indexing.common.TaskStatus;
|
||||||
import io.druid.db.IndexerSQLMetadataStorageCoordinator;
|
import io.druid.db.IndexerSQLMetadataStorageCoordinator;
|
||||||
import io.druid.guice.IndexingServiceFirehoseModule;
|
import io.druid.guice.IndexingServiceFirehoseModule;
|
||||||
import io.druid.guice.IndexingServiceModuleHelper;
|
import io.druid.guice.IndexingServiceModuleHelper;
|
||||||
@ -43,17 +45,20 @@ import io.druid.guice.LifecycleModule;
|
|||||||
import io.druid.guice.ListProvider;
|
import io.druid.guice.ListProvider;
|
||||||
import io.druid.guice.ManageLifecycle;
|
import io.druid.guice.ManageLifecycle;
|
||||||
import io.druid.guice.PolyBind;
|
import io.druid.guice.PolyBind;
|
||||||
|
import io.druid.indexing.common.TaskLock;
|
||||||
import io.druid.indexing.common.actions.LocalTaskActionClientFactory;
|
import io.druid.indexing.common.actions.LocalTaskActionClientFactory;
|
||||||
|
import io.druid.indexing.common.actions.TaskAction;
|
||||||
import io.druid.indexing.common.actions.TaskActionClientFactory;
|
import io.druid.indexing.common.actions.TaskActionClientFactory;
|
||||||
import io.druid.indexing.common.actions.TaskActionToolbox;
|
import io.druid.indexing.common.actions.TaskActionToolbox;
|
||||||
import io.druid.indexing.common.config.TaskConfig;
|
import io.druid.indexing.common.config.TaskConfig;
|
||||||
import io.druid.indexing.common.config.TaskStorageConfig;
|
import io.druid.indexing.common.config.TaskStorageConfig;
|
||||||
|
import io.druid.indexing.common.task.Task;
|
||||||
import io.druid.indexing.common.tasklogs.SwitchingTaskLogStreamer;
|
import io.druid.indexing.common.tasklogs.SwitchingTaskLogStreamer;
|
||||||
import io.druid.indexing.common.tasklogs.TaskRunnerTaskLogStreamer;
|
import io.druid.indexing.common.tasklogs.TaskRunnerTaskLogStreamer;
|
||||||
import io.druid.indexing.overlord.MetadataTaskStorage;
|
|
||||||
import io.druid.indexing.overlord.ForkingTaskRunnerFactory;
|
import io.druid.indexing.overlord.ForkingTaskRunnerFactory;
|
||||||
import io.druid.indexing.overlord.HeapMemoryTaskStorage;
|
import io.druid.indexing.overlord.HeapMemoryTaskStorage;
|
||||||
import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
|
import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
|
||||||
|
import io.druid.indexing.overlord.MetadataTaskStorage;
|
||||||
import io.druid.indexing.overlord.RemoteTaskRunnerFactory;
|
import io.druid.indexing.overlord.RemoteTaskRunnerFactory;
|
||||||
import io.druid.indexing.overlord.TaskLockbox;
|
import io.druid.indexing.overlord.TaskLockbox;
|
||||||
import io.druid.indexing.overlord.TaskMaster;
|
import io.druid.indexing.overlord.TaskMaster;
|
||||||
@ -180,6 +185,24 @@ public class CliOverlord extends ServerRunnable
|
|||||||
|
|
||||||
storageBinder.addBinding("db").to(MetadataTaskStorage.class).in(ManageLifecycle.class);
|
storageBinder.addBinding("db").to(MetadataTaskStorage.class).in(ManageLifecycle.class);
|
||||||
binder.bind(MetadataTaskStorage.class).in(LazySingleton.class);
|
binder.bind(MetadataTaskStorage.class).in(LazySingleton.class);
|
||||||
|
|
||||||
|
// gotta love type erasure
|
||||||
|
binder.bind(TypeReference.class).annotatedWith(Names.named("taskType")).toInstance(
|
||||||
|
new TypeReference<Task>()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
);
|
||||||
|
binder.bind(TypeReference.class).annotatedWith(Names.named("taskStatusType")).toInstance(new TypeReference<TaskStatus>(){});
|
||||||
|
binder.bind(TypeReference.class).annotatedWith(Names.named("taskActionType")).toInstance(
|
||||||
|
new TypeReference<TaskAction>()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
);
|
||||||
|
binder.bind(TypeReference.class).annotatedWith(Names.named("taskLockType")).toInstance(
|
||||||
|
new TypeReference<TaskLock>()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void configureRunners(Binder binder)
|
private void configureRunners(Binder binder)
|
||||||
|
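The four bindings above exist only to feed the @Named TypeReference constructor parameters on SQLMetadataStorageActionHandler. A slightly more compact, equivalent formulation (a sketch, not part of the commit; it assumes the same Guice Binder plus com.google.inject.name.Names and the imports CliOverlord already has) would be a small helper method inside the module:

// Sketch only -- an equivalent, more compact way to express the four bindings above.
private static void bindTypeReference(Binder binder, String name, TypeReference<?> typeReference)
{
  binder.bind(TypeReference.class).annotatedWith(Names.named(name)).toInstance(typeReference);
}

// which would be used inside configure() as:
//   bindTypeReference(binder, "taskType", new TypeReference<Task>() {});
//   bindTypeReference(binder, "taskStatusType", new TypeReference<TaskStatus>() {});
//   bindTypeReference(binder, "taskActionType", new TypeReference<TaskAction>() {});
//   bindTypeReference(binder, "taskLockType", new TypeReference<TaskLock>() {});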