1) Fix HadoopDruidIndexerConfigTest to actually verify the current correct behavior.

This commit is contained in:
cheddar 2013-08-05 11:37:20 -07:00
parent d6ac75cff5
commit 3c808b15c3
1 changed file with 6 additions and 6 deletions

View File

@@ -26,13 +26,11 @@ import com.metamx.druid.indexer.granularity.UniformGranularitySpec;
import com.metamx.druid.indexer.partitions.PartitionsSpec; import com.metamx.druid.indexer.partitions.PartitionsSpec;
import com.metamx.druid.indexer.updater.DbUpdaterJobSpec; import com.metamx.druid.indexer.updater.DbUpdaterJobSpec;
import com.metamx.druid.jackson.DefaultObjectMapper; import com.metamx.druid.jackson.DefaultObjectMapper;
import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@@ -440,13 +438,13 @@ public class HadoopDruidIndexerConfigTest
try { try {
cfg = jsonReadWriteRead( cfg = jsonReadWriteRead(
"{" "{"
+ "\"dataSource\": \"the:data:source\"," + "\"dataSource\": \"source\","
+ " \"granularitySpec\":{" + " \"granularitySpec\":{"
+ " \"type\":\"uniform\"," + " \"type\":\"uniform\","
+ " \"gran\":\"hour\"," + " \"gran\":\"hour\","
+ " \"intervals\":[\"2012-07-10/P1D\"]" + " \"intervals\":[\"2012-07-10/P1D\"]"
+ " }," + " },"
+ "\"segmentOutputPath\": \"/tmp/dru:id/data:test\"" + "\"segmentOutputPath\": \"hdfs://server:9100/tmp/druid/datatest\""
+ "}", + "}",
HadoopDruidIndexerConfig.class HadoopDruidIndexerConfig.class
); );
@@ -458,8 +456,10 @@ public class HadoopDruidIndexerConfigTest
Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 4712); Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 4712);
Path path = cfg.makeSegmentOutputPath(new DistributedFileSystem(), bucket); Path path = cfg.makeSegmentOutputPath(new DistributedFileSystem(), bucket);
Assert.assertEquals("/tmp/dru_id/data_test/the_data_source/20120710T050000.000Z_20120710T060000.000Z/some_brand_new_version/4712", path.toString()); Assert.assertEquals(
"hdfs://server:9100/tmp/druid/datatest/source/20120710T050000.000Z_20120710T060000.000Z/some_brand_new_version/4712",
path.toString()
);
} }
@Test @Test