DbConnectorConfig: Serialization is hard

Gian Merlino 2013-05-14 14:39:29 -07:00
parent 8999080801
commit 343af872b5
3 changed files with 49 additions and 12 deletions


@@ -181,7 +181,7 @@ public class DbConnector
     dataSource.setPassword(config.getDatabasePassword());
     dataSource.setUrl(config.getDatabaseConnectURI());
-    if (config.isValidationQuery()) {
+    if (config.useValidationQuery()) {
       dataSource.setValidationQuery(config.getValidationQuery());
       dataSource.setTestOnBorrow(true);
     }
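Note: the hunk above gates pool-level connection validation on the renamed getter. A minimal, self-contained sketch of the same wiring, assuming commons-dbcp's BasicDataSource; the literal values are placeholders standing in for a real DbConnectorConfig:

import org.apache.commons.dbcp.BasicDataSource;

public class ValidationQueryWiring
{
  // Builds a pooled DataSource the way DbConnector does above; the literals
  // stand in for config.getDatabasePassword(), getDatabaseConnectURI(), etc.
  public static BasicDataSource makeDataSource(boolean useValidationQuery)
  {
    final BasicDataSource dataSource = new BasicDataSource();
    dataSource.setUsername("druid");                      // placeholder
    dataSource.setPassword("p4ssw0rd");                   // placeholder
    dataSource.setUrl("jdbc:mysql://localhost/druid");    // placeholder

    if (useValidationQuery) {
      // With testOnBorrow set, the pool runs the validation query against a
      // connection before lending it out, discarding dead connections.
      dataSource.setValidationQuery("SELECT 1");          // placeholder for config.getValidationQuery()
      dataSource.setTestOnBorrow(true);
    }
    return dataSource;
  }
}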


@@ -44,7 +44,7 @@ public abstract class DbConnectorConfig
   @JsonProperty("useValidationQuery")
   @Config("druid.database.validation")
-  public boolean isValidationQuery() {
+  public boolean useValidationQuery() {
     return false;
   }
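The rename brings the Java method name in line with the JSON property it is annotated with, so the wire name and the code name can no longer drift apart. A minimal sketch of the annotation's effect, assuming modern jackson-databind coordinates (the original code used Jackson 1.x under org.codehaus.jackson, but the behavior shown is the same) and a hypothetical Config class:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PropertyNamingSketch
{
  // Hypothetical stand-in for DbConnectorConfig's getter.
  public static class Config
  {
    @JsonProperty("useValidationQuery")
    public boolean useValidationQuery()
    {
      return false;
    }
  }

  public static void main(String[] args) throws Exception
  {
    // The annotation, not the method name, decides the JSON field name, so
    // this prints {"useValidationQuery":false}. Keeping both names identical
    // means callers and serialized configs agree on what the flag is called.
    System.out.println(new ObjectMapper().writeValueAsString(new Config()));
  }
}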


@@ -24,6 +24,7 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.metamx.druid.indexer.granularity.UniformGranularitySpec;
 import com.metamx.druid.indexer.partitions.PartitionsSpec;
+import com.metamx.druid.indexer.updater.DbUpdaterJobSpec;
 import com.metamx.druid.jackson.DefaultObjectMapper;
 import org.joda.time.Interval;
@@ -39,8 +40,8 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + " \"granularitySpec\":{"
           + " \"type\":\"uniform\","
           + " \"gran\":\"hour\","
@@ -74,7 +75,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"segmentGranularity\":\"day\","
           + "\"intervals\":[\"2012-02-01/P1D\"]"
@@ -137,7 +138,7 @@ public class HadoopDruidIndexerConfigTest
   public void testInvalidGranularityCombination() {
     boolean thrown = false;
     try {
-      final HadoopDruidIndexerConfig cfg = jsonMapper.readValue(
+      final HadoopDruidIndexerConfig cfg = jsonReadWriteRead(
           "{"
           + "\"segmentGranularity\":\"day\","
           + "\"intervals\":[\"2012-02-01/P1D\"],"
@@ -161,7 +162,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{}",
           HadoopDruidIndexerConfig.class
       );
@@ -183,7 +184,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"partitionsSpec\":{"
           + " \"targetPartitionSize\":100"
@@ -221,7 +222,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"partitionsSpec\":{"
           + " \"targetPartitionSize\":100,"
@@ -266,7 +267,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"targetPartitionSize\":100,"
           + "\"partitionDimension\":\"foo\""
@@ -309,7 +310,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"partitionsSpec\":{"
           + " \"targetPartitionSize\":100,"
@@ -354,7 +355,7 @@ public class HadoopDruidIndexerConfigTest
   public void testInvalidPartitionsCombination() {
     boolean thrown = false;
     try {
-      final HadoopDruidIndexerConfig cfg = jsonMapper.readValue(
+      final HadoopDruidIndexerConfig cfg = jsonReadWriteRead(
           "{"
           + "\"targetPartitionSize\":100,"
           + "\"partitionsSpec\":{"
@@ -369,4 +370,40 @@ public class HadoopDruidIndexerConfigTest
     Assert.assertTrue("Exception thrown", thrown);
   }
+
+  @Test
+  public void testDbUpdaterJobSpec() throws Exception
+  {
+    final HadoopDruidIndexerConfig cfg;
+
+    cfg = jsonReadWriteRead(
+        "{"
+        + "\"updaterJobSpec\":{\n"
+        + " \"type\" : \"db\",\n"
+        + " \"connectURI\" : \"jdbc:mysql://localhost/druid\",\n"
+        + " \"user\" : \"rofl\",\n"
+        + " \"password\" : \"p4ssw0rd\",\n"
+        + " \"segmentTable\" : \"segments\"\n"
+        + " }"
+        + "}",
+        HadoopDruidIndexerConfig.class
+    );
+
+    final DbUpdaterJobSpec spec = (DbUpdaterJobSpec) cfg.getUpdaterJobSpec();
+    Assert.assertEquals("segments", spec.getSegmentTable());
+    Assert.assertEquals("jdbc:mysql://localhost/druid", spec.getDatabaseConnectURI());
+    Assert.assertEquals("rofl", spec.getDatabaseUser());
+    Assert.assertEquals("p4ssw0rd", spec.getDatabasePassword());
+    Assert.assertEquals(false, spec.useValidationQuery());
+  }
+
+  private <T> T jsonReadWriteRead(String s, Class<T> klass)
+  {
+    try {
+      return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass);
+    }
+    catch (Exception e) {
+      throw Throwables.propagate(e);
+    }
+  }
 }
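The jsonReadWriteRead helper added above deserializes, re-serializes, and deserializes again, so every test now exercises the write path as well as the read path; a property that serializes under an unexpected name (the kind of mismatch this commit fixes) surfaces immediately as a failed assertion. A standalone sketch of the same round-trip idiom, assuming jackson-databind and a hypothetical payload:

import com.fasterxml.jackson.databind.ObjectMapper;

public final class RoundTripSketch
{
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Read JSON into a bean, write the bean back out, then read the result
  // again; if a property is readable but writes under a different name, the
  // second read drops it and downstream assertions fail.
  public static <T> T jsonReadWriteRead(String json, Class<T> klass) throws Exception
  {
    return MAPPER.readValue(MAPPER.writeValueAsBytes(MAPPER.readValue(json, klass)), klass);
  }

  public static void main(String[] args) throws Exception
  {
    // Hypothetical usage with a Map-shaped payload.
    System.out.println(jsonReadWriteRead("{\"a\":1}", java.util.Map.class)); // {a=1}
  }
}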