fix things up according to code review comments

fjy 2013-09-26 11:35:45 -07:00
parent 0461e4688f
commit 8bc56daa66
5 changed files with 5 additions and 69 deletions

HadoopDruidIndexerConfig.java

@@ -97,27 +97,6 @@ public class HadoopDruidIndexerConfig
public static HadoopDruidIndexerConfig fromMap(Map<String, Object> argSpec)
{
//List<Registererer> registererers = Lists.transform(
// MapUtils.getList(argSpec, "registererers", ImmutableList.of()),
// new Function<Object, Registererer>()
// {
// @Override
// public Registererer apply(@Nullable Object input)
// {
// try {
// return (Registererer) Class.forName((String) input).newInstance();
// }
// catch (Exception e) {
// throw Throwables.propagate(e);
// }
// }
// }
//);
//if (!registererers.isEmpty()) {
// Registererers.registerHandlers(registererers, Arrays.asList(jsonMapper));
//}
return jsonMapper.convertValue(argSpec, HadoopDruidIndexerConfig.class);
}
@@ -179,7 +158,6 @@ public class HadoopDruidIndexerConfig
private volatile DataRollupSpec rollupSpec;
private volatile DbUpdaterJobSpec updaterJobSpec;
private volatile boolean ignoreInvalidRows = false;
//private volatile List<String> registererers = Lists.newArrayList();
@JsonCreator
public HadoopDruidIndexerConfig(
@@ -204,7 +182,6 @@ public class HadoopDruidIndexerConfig
final @JsonProperty("rollupSpec") DataRollupSpec rollupSpec,
final @JsonProperty("updaterJobSpec") DbUpdaterJobSpec updaterJobSpec,
final @JsonProperty("ignoreInvalidRows") boolean ignoreInvalidRows
//final @JsonProperty("registererers") List<String> registererers
)
{
this.dataSource = dataSource;
@@ -224,7 +201,6 @@ public class HadoopDruidIndexerConfig
this.rollupSpec = rollupSpec;
this.updaterJobSpec = updaterJobSpec;
this.ignoreInvalidRows = ignoreInvalidRows;
//this.registererers = registererers;
if(partitionsSpec != null) {
Preconditions.checkArgument(
@@ -517,17 +493,6 @@ public class HadoopDruidIndexerConfig
this.ignoreInvalidRows = ignoreInvalidRows;
}
//@JsonProperty
//public List<String> getRegistererers()
//{
// return registererers;
//}
//
//public void setRegistererers(List<String> registererers)
//{
// this.registererers = registererers;
//}
/********************************************
Granularity/Bucket Helper Methods
********************************************/
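With the commented-out registererers plumbing removed, fromMap is now a plain Jackson conversion of the spec map. A minimal usage sketch; dataSource and ignoreInvalidRows are the only field names taken from this diff, and the values are illustrative:

import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Hypothetical caller: builds an indexer config straight from a spec map.
Map<String, Object> argSpec = ImmutableMap.<String, Object>of(
    "dataSource", "example_source",   // illustrative value
    "ignoreInvalidRows", false
);
HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromMap(argSpec);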

TaskSerdeTest.java

@@ -360,7 +360,6 @@ public class TaskSerdeTest
new DataRollupSpec(ImmutableList.<AggregatorFactory>of(), QueryGranularity.NONE),
null,
false
//ImmutableList.<String>of()
)
);

DiscoveryModule.java

@@ -120,6 +120,7 @@ public class DiscoveryModule implements Module
*/
public static void registerKey(Binder binder, Key<DruidNode> key)
{
DruidBinders.discoveryAnnouncementBinder(binder).addBinding().toInstance(new KeyHolder<>(key));
LifecycleModule.registerKey(binder, key);
}
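The added line means registerKey now both adds the key to the discovery-announcement binding set and registers it with the lifecycle. A sketch of a caller; the module class here is hypothetical and the DruidNode/DiscoveryModule imports are assumed:

import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.Module;

// Hypothetical module: announces the bound DruidNode via service discovery.
public class ExampleAnnouncingModule implements Module
{
  @Override
  public void configure(Binder binder)
  {
    // One call wires both the announcement (via the multibinder above)
    // and the lifecycle registration.
    DiscoveryModule.registerKey(binder, Key.get(DruidNode.class));
  }
}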

CliHadoopIndexer.java

@@ -19,10 +19,8 @@
package io.druid.cli;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.api.client.repackaged.com.google.common.base.Throwables;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.io.CharStreams;
import com.google.common.io.InputSupplier;
@@ -32,16 +30,11 @@ import com.google.inject.Module;
import com.google.inject.Provides;
import com.metamx.common.lifecycle.Lifecycle;
import com.metamx.common.logger.Logger;
import com.metamx.emitter.core.LoggingEmitter;
import com.metamx.emitter.core.LoggingEmitterConfig;
import com.metamx.emitter.service.ServiceEmitter;
import io.airlift.command.Arguments;
import io.airlift.command.Command;
import io.druid.guice.LazySingleton;
import io.druid.guice.ManageLifecycle;
import io.druid.indexer.HadoopDruidIndexerConfig;
import io.druid.indexer.HadoopDruidIndexerJob;
import io.druid.initialization.LogLevelAdjuster;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,7 +48,7 @@ import java.util.List;
*/
@Command(
name = "hadoop",
description = "Runs the batch Hadoop Druid Indexer, see LINK GOES HERE for a description."
description = "Runs the batch Hadoop Druid Indexer, see https://github.com/metamx/druid/wiki/Batch-ingestion for a description."
)
public class CliHadoopIndexer extends GuiceRunnable
{
@@ -122,21 +115,10 @@ public class CliHadoopIndexer extends GuiceRunnable
public void run()
{
try {
LogLevelAdjuster.register();
final Injector injector = Initialization.makeInjectorWithModules(
getBaseInjector(), getModules()
);
final Lifecycle lifecycle = injector.getInstance(Lifecycle.class);
Injector injector = makeInjector();
final HadoopDruidIndexerJob job = injector.getInstance(HadoopDruidIndexerJob.class);
try {
lifecycle.start();
}
catch (Throwable t) {
log.error(t, "Error when starting up. Failing.");
System.exit(1);
}
Lifecycle lifecycle = initLifecycle(injector);
job.run();
@@ -150,7 +132,7 @@
}
catch (Exception e) {
throw com.google.common.base.Throwables.propagate(e);
throw Throwables.propagate(e);
}
}
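Net effect of the run() changes: the hand-rolled Initialization/Lifecycle boilerplate collapses into the GuiceRunnable helpers. Roughly, the resulting flow (a sketch; whatever sits between job.run() and the catch is elided because the hunks above do not show it):

@Override
public void run()
{
  try {
    Injector injector = makeInjector();   // replaces Initialization.makeInjectorWithModules(...)
    final HadoopDruidIndexerJob job = injector.getInstance(HadoopDruidIndexerJob.class);
    Lifecycle lifecycle = initLifecycle(injector);   // replaces lifecycle.start() plus error handling
    job.run();
    // ... result handling elided (not shown in the hunks above)
  }
  catch (Exception e) {
    throw Throwables.propagate(e);
  }
}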

GuiceRunnable.java

@@ -28,11 +28,6 @@ public abstract class GuiceRunnable implements Runnable
this.baseInjector = injector;
}
public Injector getBaseInjector()
{
return baseInjector;
}
protected abstract List<Object> getModules();
public Injector makeInjector()
@@ -67,10 +62,4 @@
throw Throwables.propagate(e);
}
}
@Override
public void run()
{
initLifecycle(makeInjector());
}
}
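With getBaseInjector() and the default run() override gone, each command implements run() itself on top of makeInjector() and initLifecycle(). Judging from the lifecycle.start() boilerplate deleted in CliHadoopIndexer and the catch/propagate tail visible above, initLifecycle plausibly centralizes something like the following; this is an assumption, not the file's actual body:

public Lifecycle initLifecycle(Injector injector)
{
  try {
    final Lifecycle lifecycle = injector.getInstance(Lifecycle.class);
    lifecycle.start();   // assumption: mirrors the startup code deleted from CliHadoopIndexer
    return lifecycle;
  }
  catch (Throwable t) {
    throw Throwables.propagate(t);
  }
}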