From 31f5cb5c8f5fce499de2657c93994759c4ff699f Mon Sep 17 00:00:00 2001
From: Gian Merlino
Date: Tue, 14 May 2013 14:39:29 -0700
Subject: [PATCH] DbConnectorConfig: Serialization is hard

---
 .../java/com/metamx/druid/db/DbConnector.java |  2 +-
 .../metamx/druid/db/DbConnectorConfig.java    |  2 +-
 .../indexer/HadoopDruidIndexerConfigTest.java | 55 ++++++++++++++++---
 3 files changed, 48 insertions(+), 11 deletions(-)

diff --git a/common/src/main/java/com/metamx/druid/db/DbConnector.java b/common/src/main/java/com/metamx/druid/db/DbConnector.java
index 1e73b731353..cb202b1a8f2 100644
--- a/common/src/main/java/com/metamx/druid/db/DbConnector.java
+++ b/common/src/main/java/com/metamx/druid/db/DbConnector.java
@@ -181,7 +181,7 @@ public class DbConnector
       dataSource.setPassword(config.getDatabasePassword());
       dataSource.setUrl(config.getDatabaseConnectURI());
 
-      if (config.isValidationQuery()) {
+      if (config.useValidationQuery()) {
         dataSource.setValidationQuery(config.getValidationQuery());
         dataSource.setTestOnBorrow(true);
       }
diff --git a/common/src/main/java/com/metamx/druid/db/DbConnectorConfig.java b/common/src/main/java/com/metamx/druid/db/DbConnectorConfig.java
index fb7d1a99916..e17302a3183 100644
--- a/common/src/main/java/com/metamx/druid/db/DbConnectorConfig.java
+++ b/common/src/main/java/com/metamx/druid/db/DbConnectorConfig.java
@@ -44,7 +44,7 @@ public abstract class DbConnectorConfig
 
   @JsonProperty("useValidationQuery")
   @Config("druid.database.validation")
-  public boolean isValidationQuery() {
+  public boolean useValidationQuery() {
     return false;
   }
 
diff --git a/indexer/src/test/java/com/metamx/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexer/src/test/java/com/metamx/druid/indexer/HadoopDruidIndexerConfigTest.java
index 5fdff8ce8b8..5745c3ab3e9 100644
--- a/indexer/src/test/java/com/metamx/druid/indexer/HadoopDruidIndexerConfigTest.java
+++ b/indexer/src/test/java/com/metamx/druid/indexer/HadoopDruidIndexerConfigTest.java
@@ -24,6 +24,7 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.metamx.druid.indexer.granularity.UniformGranularitySpec;
 import com.metamx.druid.indexer.partitions.PartitionsSpec;
+import com.metamx.druid.indexer.updater.DbUpdaterJobSpec;
 import com.metamx.druid.jackson.DefaultObjectMapper;
 
 import org.joda.time.Interval;
@@ -39,8 +40,8 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
-          "{"
+      cfg = jsonReadWriteRead(
+          "{"
           + " \"granularitySpec\":{"
           + "   \"type\":\"uniform\","
           + "   \"gran\":\"hour\","
@@ -74,7 +75,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"segmentGranularity\":\"day\","
           + "\"intervals\":[\"2012-02-01/P1D\"]"
@@ -137,7 +138,7 @@ public class HadoopDruidIndexerConfigTest
   public void testInvalidGranularityCombination() {
     boolean thrown = false;
     try {
-      final HadoopDruidIndexerConfig cfg = jsonMapper.readValue(
+      final HadoopDruidIndexerConfig cfg = jsonReadWriteRead(
           "{"
          + "\"segmentGranularity\":\"day\","
          + "\"intervals\":[\"2012-02-01/P1D\"],"
@@ -161,7 +162,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{}",
           HadoopDruidIndexerConfig.class
       );
@@ -183,7 +184,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"partitionsSpec\":{"
           + "  \"targetPartitionSize\":100"
@@ -221,7 +222,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"partitionsSpec\":{"
           + "  \"targetPartitionSize\":100,"
@@ -260,7 +261,7 @@ public class HadoopDruidIndexerConfigTest
     final HadoopDruidIndexerConfig cfg;
 
     try {
-      cfg = jsonMapper.readValue(
+      cfg = jsonReadWriteRead(
           "{"
           + "\"targetPartitionSize\":100,"
           + "\"partitionDimension\":\"foo\""
@@ -296,7 +297,7 @@ public class HadoopDruidIndexerConfigTest
   public void testInvalidPartitionsCombination() {
     boolean thrown = false;
     try {
-      final HadoopDruidIndexerConfig cfg = jsonMapper.readValue(
+      final HadoopDruidIndexerConfig cfg = jsonReadWriteRead(
           "{"
           + "\"targetPartitionSize\":100,"
           + "\"partitionsSpec\":{"
@@ -311,4 +312,40 @@ public class HadoopDruidIndexerConfigTest
 
     Assert.assertTrue("Exception thrown", thrown);
   }
+
+  @Test
+  public void testDbUpdaterJobSpec() throws Exception
+  {
+    final HadoopDruidIndexerConfig cfg;
+
+    cfg = jsonReadWriteRead(
+        "{"
+        + "\"updaterJobSpec\":{\n"
+        + "  \"type\" : \"db\",\n"
+        + "  \"connectURI\" : \"jdbc:mysql://localhost/druid\",\n"
+        + "  \"user\" : \"rofl\",\n"
+        + "  \"password\" : \"p4ssw0rd\",\n"
+        + "  \"segmentTable\" : \"segments\"\n"
+        + "  }"
+        + "}",
+        HadoopDruidIndexerConfig.class
+    );
+
+    final DbUpdaterJobSpec spec = (DbUpdaterJobSpec) cfg.getUpdaterJobSpec();
+    Assert.assertEquals("segments", spec.getSegmentTable());
+    Assert.assertEquals("jdbc:mysql://localhost/druid", spec.getDatabaseConnectURI());
+    Assert.assertEquals("rofl", spec.getDatabaseUser());
+    Assert.assertEquals("p4ssw0rd", spec.getDatabasePassword());
+    Assert.assertEquals(false, spec.useValidationQuery());
+  }
+
+  private <T> T jsonReadWriteRead(String s, Class<T> klass)
+  {
+    try {
+      return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass);
+    }
+    catch (Exception e) {
+      throw Throwables.propagate(e);
+    }
+  }
 }