fix compilation

nishantmonu51 2014-07-31 16:20:58 +05:30
parent 637bd35785
commit 32b9290723
2 changed files with 15 additions and 15 deletions


@@ -41,7 +41,7 @@ public class LoggingProgressIndicator extends AbstractProgressIndicator
   public LoggingProgressIndicator(String progressName)
   {
     this.progressName = progressName;
-    this.global = new Stopwatch();
+    this.global = Stopwatch.createUnstarted();
   }

   @Override
@@ -69,9 +69,8 @@ public class LoggingProgressIndicator extends AbstractProgressIndicator
     if (sectionWatch != null) {
       throw new ISE("[%s]: Cannot start progress tracker for [%s]. It is already started.", progressName, section);
     }
-    sectionWatch = new Stopwatch();
+    sectionWatch = Stopwatch.createStarted();
     sections.put(section, sectionWatch);
-    sectionWatch.start();
   }

   @Override
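The change in this file tracks Guava's switch from the public Stopwatch constructor to static factory methods; createStarted() also makes the separate start() call unnecessary. Below is a minimal sketch of the replacement pattern, not the actual LoggingProgressIndicator code: the class and field names are illustrative.

import com.google.common.base.Stopwatch;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.concurrent.TimeUnit;

// Illustrative sketch of the Stopwatch migration shown above; names are made up.
class ProgressTimers
{
  // Unstarted watch, started explicitly later (mirrors the "global" field).
  private final Stopwatch global = Stopwatch.createUnstarted();
  private final Map<String, Stopwatch> sections = Maps.newHashMap();

  void start()
  {
    global.start();
  }

  void startSection(String section)
  {
    // createStarted() begins timing immediately, so the old sectionWatch.start() call goes away.
    sections.put(section, Stopwatch.createStarted());
  }

  long stopSection(String section)
  {
    Stopwatch watch = sections.remove(section);
    watch.stop();
    return watch.elapsed(TimeUnit.MILLISECONDS);
  }
}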


@@ -33,6 +33,7 @@ import com.metamx.common.logger.Logger;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.granularity.QueryGranularity;
 import io.druid.jackson.DefaultObjectMapper;
+import io.druid.query.TestQueryRunners;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
@@ -133,7 +134,7 @@ public class SchemalessIndex
       final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();

       if (theIndex == null) {
-        theIndex = new IncrementalIndex(timestamp, QueryGranularity.MINUTE, METRIC_AGGS);
+        theIndex = new IncrementalIndex(timestamp, QueryGranularity.MINUTE, METRIC_AGGS, TestQueryRunners.pool);
       }

       final List<String> dims = Lists.newArrayList();
@@ -178,11 +179,11 @@ public class SchemalessIndex
       mergedFile.mkdirs();
       mergedFile.deleteOnExit();

-      IndexMerger.persist(top, topFile);
-      IndexMerger.persist(bottom, bottomFile);
+      IndexMaker.persist(top, topFile);
+      IndexMaker.persist(bottom, bottomFile);

       mergedIndex = io.druid.segment.IndexIO.loadIndex(
-          IndexMerger.mergeQueryableIndex(
+          IndexMaker.mergeQueryableIndex(
               Arrays.asList(IndexIO.loadIndex(topFile), IndexIO.loadIndex(bottomFile)), METRIC_AGGS, mergedFile
           )
       );
@@ -224,7 +225,7 @@ public class SchemalessIndex
       mergedFile.deleteOnExit();

       QueryableIndex index = IndexIO.loadIndex(
-          IndexMerger.mergeQueryableIndex(
+          IndexMaker.mergeQueryableIndex(
               Arrays.asList(rowPersistedIndexes.get(index1), rowPersistedIndexes.get(index2)), METRIC_AGGS, mergedFile
           )
       );
@@ -261,7 +262,7 @@ public class SchemalessIndex
       }

       QueryableIndex index = IndexIO.loadIndex(
-          IndexMerger.mergeQueryableIndex(indexesToMerge, METRIC_AGGS, mergedFile)
+          IndexMaker.mergeQueryableIndex(indexesToMerge, METRIC_AGGS, mergedFile)
       );

       return index;
@@ -330,7 +331,7 @@ public class SchemalessIndex
         }

         final IncrementalIndex rowIndex = new IncrementalIndex(
-            timestamp, QueryGranularity.MINUTE, METRIC_AGGS
+            timestamp, QueryGranularity.MINUTE, METRIC_AGGS, TestQueryRunners.pool
         );

         rowIndex.add(
@@ -342,7 +343,7 @@ public class SchemalessIndex
         tmpFile.mkdirs();
         tmpFile.deleteOnExit();

-        IndexMerger.persist(rowIndex, tmpFile);
+        IndexMaker.persist(rowIndex, tmpFile);
         rowPersistedIndexes.add(IndexIO.loadIndex(tmpFile));
       }
     }
@@ -360,7 +361,7 @@ public class SchemalessIndex
     log.info("Realtime loading index file[%s]", filename);

     final IncrementalIndex retVal = new IncrementalIndex(
-        new DateTime("2011-01-12T00:00:00.000Z").getMillis(), QueryGranularity.MINUTE, aggs
+        new DateTime("2011-01-12T00:00:00.000Z").getMillis(), QueryGranularity.MINUTE, aggs, TestQueryRunners.pool
     );

     try {
@@ -402,7 +403,7 @@ public class SchemalessIndex
         theFile.mkdirs();
         theFile.deleteOnExit();
         filesToMap.add(theFile);
-        IndexMerger.persist(index, theFile);
+        IndexMaker.persist(index, theFile);
       }

       return filesToMap;
@@ -462,7 +463,7 @@ public class SchemalessIndex
         );
       }

-      return IndexIO.loadIndex(IndexMerger.append(adapters, mergedFile));
+      return IndexIO.loadIndex(IndexMaker.append(adapters, mergedFile));
     }
     catch (IOException e) {
       throw Throwables.propagate(e);
@@ -481,7 +482,7 @@ public class SchemalessIndex
     List<File> filesToMap = makeFilesToMap(tmpFile, files);

     return IndexIO.loadIndex(
-        IndexMerger.mergeQueryableIndex(
+        IndexMaker.mergeQueryableIndex(
             Lists.newArrayList(
                 Iterables.transform(
                     filesToMap,
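In the test helper the fix is mechanical: every IndexMerger call becomes an IndexMaker call, and every IncrementalIndex constructor gains TestQueryRunners.pool as a trailing argument. Below is a minimal sketch of the resulting persist-and-merge flow, assuming only the signatures visible in the hunks above; the package locations of IndexMaker and IncrementalIndex, the aggregator setup, and the helper name are assumptions.

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import io.druid.granularity.QueryGranularity;
import io.druid.query.TestQueryRunners;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMaker;                     // package assumed, same as IndexIO
import io.druid.segment.QueryableIndex;
import io.druid.segment.incremental.IncrementalIndex;   // package assumed

// Illustrative helper, not part of the commit.
class PersistAndMergeSketch
{
  static QueryableIndex persistAndMerge(long minTimestamp, File topFile, File bottomFile, File mergedFile)
      throws IOException
  {
    final AggregatorFactory[] aggs = new AggregatorFactory[]{new CountAggregatorFactory("count")};

    // The incremental indexes now take the shared test buffer pool as a fourth argument.
    final IncrementalIndex top = new IncrementalIndex(minTimestamp, QueryGranularity.MINUTE, aggs, TestQueryRunners.pool);
    final IncrementalIndex bottom = new IncrementalIndex(minTimestamp, QueryGranularity.MINUTE, aggs, TestQueryRunners.pool);

    // IndexMaker replaces IndexMerger for both persisting and merging on disk.
    IndexMaker.persist(top, topFile);
    IndexMaker.persist(bottom, bottomFile);

    return IndexIO.loadIndex(
        IndexMaker.mergeQueryableIndex(
            Arrays.asList(IndexIO.loadIndex(topFile), IndexIO.loadIndex(bottomFile)), aggs, mergedFile
        )
    );
  }
}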