mirror of https://github.com/apache/druid.git

commit b5d66381f3
parent 035b4c1af4

    more cleanup
@@ -97,24 +97,4 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements DataSegmentPuller
     log.info("Pull of file[%s] completed in %,d millis (%s bytes)", key, System.currentTimeMillis() - startTime,
              meta.getObjectSize());
   }
-
-  @Override
-  public long getLastModified(DataSegment segment) throws SegmentLoadingException
-  {
-    String key = (String) segment.getLoadSpec().get("key");
-    OperationResult<ColumnList<String>> result;
-    try
-    {
-      result = this.keyspace.prepareQuery(descriptorStorage)
-                            .getKey(key)
-                            .execute();
-      ColumnList<String> children = result.getResult();
-      long lastModified = children.getColumnByName("lastmodified").getLongValue();
-      log.info("Read lastModified for [%s] as [%d]", key, lastModified);
-      return lastModified;
-    } catch (ConnectionException e)
-    {
-      throw new SegmentLoadingException(e, e.getMessage());
-    }
-  }
 }
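Note: this commit removes getLastModified(DataSegment) from every DataSegmentPuller implementation (Cassandra above; HDFS, S3, and local below). For context, the method supported staleness checks of roughly the following shape. This is a hypothetical caller sketched for illustration; `puller`, `segment`, `cacheDir`, and the getSegmentFiles call are assumptions, not code from this commit.

    // Re-pull a segment only if the copy in deep storage is newer than the
    // locally cached directory (hypothetical caller).
    long remoteLastModified = puller.getLastModified(segment);
    if (remoteLastModified > cacheDir.lastModified()) {
      puller.getSegmentFiles(segment, cacheDir);
    }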
@@ -65,20 +65,6 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller
     }
   }

-  @Override
-  public long getLastModified(DataSegment segment) throws SegmentLoadingException
-  {
-    Path path = getPath(segment);
-    FileSystem fs = checkPathAndGetFilesystem(path);
-
-    try {
-      return fs.getFileStatus(path).getModificationTime();
-    }
-    catch (IOException e) {
-      throw new SegmentLoadingException(e, "Problem loading status of path[%s]", path);
-    }
-  }
-
   private Path getPath(DataSegment segment)
   {
     return new Path(String.valueOf(segment.getLoadSpec().get("path")));
@@ -322,7 +322,6 @@ public class IndexGeneratorJob implements Jobby
       Bucket bucket = Bucket.fromGroupKey(keyBytes.getGroupKey()).lhs;

       final Interval interval = config.getGranularitySpec().bucketInterval(bucket.time).get();
-      //final DataRollupSpec rollupSpec = config.getRollupSpec();
       final AggregatorFactory[] aggs = config.getSchema().getDataSchema().getAggregators();

       IncrementalIndex index = makeIncrementalIndex(bucket, aggs);
@ -1,46 +0,0 @@
|
|||
/*
|
||||
* Druid - a distributed column store.
|
||||
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
*/
|
||||
|
||||
package io.druid.indexer.rollup;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import io.druid.granularity.QueryGranularity;
|
||||
import io.druid.query.aggregation.AggregatorFactory;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Class uses public fields to work around http://jira.codehaus.org/browse/MSHADE-92
|
||||
*
|
||||
* Adjust to JsonCreator and final fields when resolved.
|
||||
*/
|
||||
@Deprecated
|
||||
public class DataRollupSpec
|
||||
{
|
||||
@JsonProperty
|
||||
public List<AggregatorFactory> aggs;
|
||||
|
||||
@JsonProperty
|
||||
public QueryGranularity rollupGranularity = QueryGranularity.NONE;
|
||||
|
||||
public List<AggregatorFactory> getAggs()
|
||||
{
|
||||
return aggs;
|
||||
}
|
||||
}
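Note: the deleted class's own Javadoc works around MSHADE-92 with public fields and says to move to @JsonCreator with final fields once resolved. A minimal sketch of that target shape, assuming standard Jackson bindings (not code from this commit):

    public class DataRollupSpec
    {
      private final List<AggregatorFactory> aggs;
      private final QueryGranularity rollupGranularity;

      @JsonCreator
      public DataRollupSpec(
          @JsonProperty("aggs") List<AggregatorFactory> aggs,
          @JsonProperty("rollupGranularity") QueryGranularity rollupGranularity
      )
      {
        this.aggs = aggs;
        // Preserves the old default of QueryGranularity.NONE when absent.
        this.rollupGranularity = rollupGranularity == null ? QueryGranularity.NONE : rollupGranularity;
      }

      @JsonProperty
      public List<AggregatorFactory> getAggs()
      {
        return aggs;
      }

      @JsonProperty
      public QueryGranularity getRollupGranularity()
      {
        return rollupGranularity;
      }
    }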
@@ -93,27 +93,6 @@ public class OverlordResource
     this.configManager = configManager;
   }

-  @POST
-  @Path("/merge")
-  @Consumes("application/json")
-  @Produces("application/json")
-  @Deprecated
-  public Response doMerge(final Task task)
-  {
-    // legacy endpoint
-    return doIndex(task);
-  }
-
-  @POST
-  @Path("/index")
-  @Consumes("application/json")
-  @Produces("application/json")
-  @Deprecated
-  public Response doIndex(final Task task)
-  {
-    return taskPost(task);
-  }
-
   @POST
   @Path("/task")
   @Consumes("application/json")
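Note: both removed endpoints already delegated to taskPost, so /task is the single surviving submission path. A sketch of an equivalent client-side POST follows; the host, port, URL prefix, and task JSON are illustrative assumptions.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class SubmitTaskExample
    {
      public static void main(String[] args) throws Exception
      {
        // Endpoint location is an assumption for illustration.
        URL url = new URL("http://overlord:8090/druid/indexer/v1/task");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setDoOutput(true);

        String taskJson = "{\"type\":\"index\"}"; // stand-in; real task specs are larger
        try (OutputStream os = conn.getOutputStream()) {
          os.write(taskJson.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + conn.getResponseCode());
      }
    }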
@@ -168,31 +168,6 @@ public class S3DataSegmentPuller implements DataSegmentPuller
     }
   }

-  @Override
-  public long getLastModified(DataSegment segment) throws SegmentLoadingException
-  {
-    final S3Coords coords = new S3Coords(segment);
-    try {
-      final S3Object objDetails = S3Utils.retryS3Operation(
-          new Callable<S3Object>()
-          {
-            @Override
-            public S3Object call() throws Exception
-            {
-              return s3Client.getObjectDetails(new S3Bucket(coords.bucket), coords.path);
-            }
-          }
-      );
-      return objDetails.getLastModifiedDate().getTime();
-    }
-    catch (S3ServiceException | IOException e) {
-      throw new SegmentLoadingException(e, e.getMessage());
-    }
-    catch (Exception e) {
-      throw Throwables.propagate(e);
-    }
-  }
-
   private static class S3Coords
   {
     String bucket;
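Note: the removed method wraps the jets3t getObjectDetails call in S3Utils.retryS3Operation. A rough sketch of a retry wrapper of that shape; the retry count, backoff, and retry-on-everything policy are assumptions, and the real S3Utils implementation may differ.

    // Generic retry wrapper matching how retryS3Operation(Callable<T>) is called above.
    public static <T> T retryS3Operation(java.util.concurrent.Callable<T> op) throws Exception
    {
      final int maxTries = 3; // assumption
      for (int tries = 1; ; tries++) {
        try {
          return op.call();
        }
        catch (Exception e) {
          if (tries >= maxTries) {
            throw e;
          }
          Thread.sleep(1000L * tries); // simple linear backoff
        }
      }
    }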
@@ -107,13 +107,4 @@ public class ArbitraryGranularitySpec implements GranularitySpec
   {
     return queryGranularity;
   }
-
-  @Override
-  public GranularitySpec withQueryGranularity(QueryGranularity queryGranularity)
-  {
-    return new ArbitraryGranularitySpec(
-        queryGranularity,
-        Lists.newArrayList(intervals)
-    );
-  }
 }
@@ -59,6 +59,4 @@ public interface GranularitySpec

   public QueryGranularity getQueryGranularity();

-  @Deprecated
-  public GranularitySpec withQueryGranularity(QueryGranularity queryGranularity);
 }
@@ -101,16 +101,6 @@ public class UniformGranularitySpec implements GranularitySpec
     return queryGranularity;
   }

-  @Override
-  public GranularitySpec withQueryGranularity(QueryGranularity queryGranularity)
-  {
-    return new UniformGranularitySpec(
-        segmentGranularity,
-        queryGranularity,
-        inputIntervals
-    );
-  }
-
   @JsonProperty("intervals")
   public Optional<List<Interval>> getIntervals()
   {
@@ -73,24 +73,6 @@ public class LocalDataSegmentPuller implements DataSegmentPuller
     }
   }

-  @Override
-  public long getLastModified(DataSegment segment) throws SegmentLoadingException
-  {
-    final File file = getFile(segment);
-
-    long lastModified = Long.MAX_VALUE;
-    if (file.isDirectory()) {
-      for (File childFile : file.listFiles()) {
-        lastModified = Math.min(childFile.lastModified(), lastModified);
-      }
-    }
-    else {
-      lastModified = file.lastModified();
-    }
-
-    return lastModified;
-  }
-
   private File getFile(DataSegment segment) throws SegmentLoadingException
   {
     final Map<String, Object> loadSpec = segment.getLoadSpec();
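Note: in the removed local implementation, the directory branch folds with Math.min, so it reported the earliest child timestamp (with Long.MAX_VALUE as the fold's identity, which is also what an empty directory returned), not the latest. A hypothetical newest-child variant, for contrast only and not part of this commit:

    long lastModified = 0L; // identity for Math.max
    if (file.isDirectory()) {
      for (File childFile : file.listFiles()) {
        lastModified = Math.max(childFile.lastModified(), lastModified);
      }
    } else {
      lastModified = file.lastModified();
    }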