Mirror of https://github.com/apache/druid.git (synced 2025-02-09 03:24:55 +00:00)

Commit c4397bd65a: "fixed merge conflicts"
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>

@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -24,6 +24,7 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Ordering;
 import com.google.common.io.Closeables;
 import com.metamx.common.guava.Accumulator;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Yielder;
 import com.metamx.common.guava.Yielders;
@@ -70,7 +71,7 @@ public class OrderedMergeSequence<T> implements Sequence<T>
       return yielder.get();
     }
     finally {
-      Closeables.closeQuietly(yielder);
+      CloseQuietly.close(yielder);
     }
   }

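Note: CloseQuietly.close comes from the com.metamx.common.guava package in java-util (the root pom later in this commit bumps metamx.java-util.version to 0.26.5). As rough orientation only, a quiet-close helper of this kind generally looks like the sketch below; this is an illustrative assumption, not the actual java-util source, which may also log the swallowed exception.

    import java.io.Closeable;
    import java.io.IOException;

    // Hypothetical sketch of a "close quietly" helper, shown only to illustrate
    // the pattern this commit migrates to (Closeables.closeQuietly -> CloseQuietly.close).
    public final class CloseQuietlySketch
    {
      private CloseQuietlySketch() {}

      public static void close(Closeable closeable)
      {
        if (closeable == null) {
          return;
        }
        try {
          closeable.close();
        }
        catch (IOException e) {
          // Swallowed so that cleanup in a finally block cannot mask the original
          // exception; a real implementation would typically log this.
        }
      }
    }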
@@ -46,14 +46,9 @@ public class SerializerUtils

   public void writeString(OutputSupplier<? extends OutputStream> supplier, String name) throws IOException
   {
-    OutputStream out = null;
-    try {
-      out = supplier.getOutput();
-      writeString(out, name);
-    }
-    finally {
-      Closeables.closeQuietly(out);
-    }
+    try (OutputStream out = supplier.getOutput()) {
+      writeString(out, name);
+    }
   }

   public void writeString(WritableByteChannel out, String name) throws IOException
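The hunk above swaps the manual null-check/finally cleanup for try-with-resources. For readers less familiar with the construct, a small self-contained sketch follows (the file names are placeholders): each resource is closed automatically even if the body throws, and a failure inside close() is attached to the primary exception as a suppressed exception instead of replacing it.

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public final class CopySketch
    {
      public static void main(String[] args) throws IOException
      {
        // Both streams are closed automatically, in reverse order, when the block exits.
        try (
            InputStream in = Files.newInputStream(Paths.get("in.bin"));
            OutputStream out = Files.newOutputStream(Paths.get("out.bin"))
        ) {
          byte[] buf = new byte[8192];
          int n;
          while ((n = in.read(buf)) != -1) {
            out.write(buf, 0, n);
          }
        }
      }
    }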
@@ -69,10 +69,9 @@ A sample worker setup spec is shown below:
       "keyName":"keyName"
     },
     "userData":{
-      "classType":"galaxy",
-      "env":"druid",
-      "version":"druid_version",
-      "type":"sample_cluster/worker"
+      "impl":"string",
+      "data":"version=:VERSION:",
+      "versionReplacementString":":VERSION:"
     }
 }
 ```
@@ -81,8 +80,8 @@ Issuing a GET request at the same URL will return the current worker setup spec

 |Property|Description|Default|
 |--------|-----------|-------|
-|`minVersion`|The coordinator only assigns tasks to workers with a version greater than the minVersion. If this is not specified, the minVersion will be the same as the coordinator version.|none|
+|`minVersion`|The coordinator only assigns tasks to workers with a version greater than the minVersion. If this is not specified, the minVersion will be druid.indexer.runner.minWorkerVersion.|none|
 |`minNumWorkers`|The minimum number of workers that can be in the cluster at any given time.|0|
 |`maxNumWorkers`|The maximum number of workers that can be in the cluster at any given time.|0|
-|`nodeData`|A JSON object that contains metadata about new nodes to create.|none|
-|`userData`|A JSON object that contains metadata about how the node should register itself on startup. This data is sent with node creation requests.|none|
+|`nodeData`|A JSON object that describes how to launch new nodes. Currently, only EC2 is supported.|none; required|
+|`userData`|A JSON object that describes how to configure new nodes. Currently, only EC2 is supported. If you have set druid.indexer.autoscale.workerVersion, this must have a versionReplacementString. Otherwise, a versionReplacementString is not necessary.|none; optional|
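Worked example of the substitution the new userData format describes: with "data":"version=:VERSION:" and "versionReplacementString":":VERSION:", assigning a worker version of "1234" produces the user data "version=1234", which is then base64-encoded before being passed to EC2. A minimal standalone sketch of that transformation (the worker version value is an arbitrary placeholder, and the JDK Base64 class is used here instead of commons-codec purely to keep the example dependency-free):

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public final class UserDataSubstitutionExample
    {
      public static void main(String[] args)
      {
        String data = "version=:VERSION:";
        String versionReplacementString = ":VERSION:";
        String workerVersion = "1234";

        String finalData = data.replace(versionReplacementString, workerVersion);
        String userDataBase64 = Base64.getEncoder()
                                      .encodeToString(finalData.getBytes(StandardCharsets.UTF_8));

        System.out.println(finalData);       // version=1234
        System.out.println(userDataBase64);  // dmVyc2lvbj0xMjM0
      }
    }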
@@ -22,6 +22,7 @@ Additional peon configs include:
 |`druid.indexer.task.baseTaskDir`|Base temporary working directory for tasks.|/tmp/persistent/tasks|
 |`druid.indexer.task.hadoopWorkingPath`|Temporary working directory for Hadoop tasks.|/tmp/druid-indexing|
 |`druid.indexer.task.defaultRowFlushBoundary`|Highest row count before persisting to disk. Used for indexing generating tasks.|50000|
+|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.3.0|
 |`druid.indexer.task.chathandler.type`|Choices are "noop" and "announce". Certain tasks will use service discovery to announce an HTTP endpoint that events can be posted to.|noop|

 If the peon is running in remote mode, there must be an overlord up and running. Running peons in remote mode require the following configurations:
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>

@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -21,6 +21,7 @@ package io.druid.storage.hdfs;

 import com.google.common.io.Closeables;
 import com.google.inject.Inject;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.segment.loading.DataSegmentPuller;
 import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
@@ -52,22 +53,17 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller

     final FileSystem fs = checkPathAndGetFilesystem(path);

-    FSDataInputStream in = null;
-    try {
-      if (path.getName().endsWith(".zip")) {
-        in = fs.open(path);
-        CompressionUtils.unzip(in, dir);
-        in.close();
-      }
-      else {
-        throw new SegmentLoadingException("Unknown file type[%s]", path);
-      }
-    }
-    catch (IOException e) {
-      throw new SegmentLoadingException(e, "Some IOException");
-    }
-    finally {
-      Closeables.closeQuietly(in);
-    }
+    if (path.getName().endsWith(".zip")) {
+      try {
+        try (FSDataInputStream in = fs.open(path)) {
+          CompressionUtils.unzip(in, dir);
+        }
+      }
+      catch (IOException e) {
+        throw new SegmentLoadingException(e, "Some IOException");
+      }
+    } else {
+      throw new SegmentLoadingException("Unknown file type[%s]", path);
+    }
   }

@@ -85,7 +81,8 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller
     }
   }

-  private Path getPath(DataSegment segment) {
+  private Path getPath(DataSegment segment)
+  {
     return new Path(String.valueOf(segment.getLoadSpec().get("path")));
   }

@@ -26,6 +26,7 @@ import com.google.common.io.ByteStreams;
 import com.google.common.io.Closeables;
 import com.google.common.io.OutputSupplier;
 import com.google.inject.Inject;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.logger.Logger;
 import io.druid.segment.SegmentUtils;
 import io.druid.segment.loading.DataSegmentPusher;
@@ -78,17 +79,10 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher

     fs.mkdirs(outFile.getParent());
     log.info("Compressing files from[%s] to [%s]", inDir, outFile);
-    FSDataOutputStream out = null;
     long size;
-    try {
-      out = fs.create(outFile);
-
-      size = CompressionUtils.zip(inDir, out);
-
-      out.close();
-    }
-    finally {
-      Closeables.closeQuietly(out);
+    try (FSDataOutputStream out = fs.create(outFile)) {
+      size = CompressionUtils.zip(inDir, out);
     }

     return createDescriptorFile(
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -31,6 +31,7 @@ import com.google.common.io.Closeables;
 import com.google.common.primitives.Longs;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.logger.Logger;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.StringInputRowParser;
@@ -425,7 +426,7 @@ public class IndexGeneratorJob implements Jobby
         if (caughtException == null) {
           Closeables.close(out, false);
         } else {
-          Closeables.closeQuietly(out);
+          CloseQuietly.close(out);
           throw Throwables.propagate(caughtException);
         }
       }
@@ -605,7 +606,7 @@ public class IndexGeneratorJob implements Jobby
             }
           }
           finally {
-            Closeables.closeQuietly(in);
+            CloseQuietly.close(in);
           }
           out.closeEntry();
           context.progress();
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -21,11 +21,17 @@ package io.druid.indexing.common.config;

 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.ImmutableList;

 import java.io.File;
+import java.util.List;

 public class TaskConfig
 {
+  public static List<String> DEFAULT_DEFAULT_HADOOP_COORDINATES = ImmutableList.of(
+      "org.apache.hadoop:hadoop-client:2.3.0"
+  );
+
   @JsonProperty
   private final String baseDir;

@@ -38,40 +44,57 @@ public class TaskConfig
   @JsonProperty
   private final int defaultRowFlushBoundary;

+  @JsonProperty
+  private final List<String> defaultHadoopCoordinates;
+
   @JsonCreator
   public TaskConfig(
       @JsonProperty("baseDir") String baseDir,
       @JsonProperty("baseTaskDir") String baseTaskDir,
       @JsonProperty("hadoopWorkingPath") String hadoopWorkingPath,
-      @JsonProperty("defaultRowFlushBoundary") Integer defaultRowFlushBoundary
+      @JsonProperty("defaultRowFlushBoundary") Integer defaultRowFlushBoundary,
+      @JsonProperty("defaultHadoopCoordinates") List<String> defaultHadoopCoordinates
   )
   {
     this.baseDir = baseDir == null ? "/tmp" : baseDir;
     this.baseTaskDir = new File(defaultDir(baseTaskDir, "persistent/task"));
     this.hadoopWorkingPath = defaultDir(hadoopWorkingPath, "druid-indexing");
     this.defaultRowFlushBoundary = defaultRowFlushBoundary == null ? 500000 : defaultRowFlushBoundary;
+    this.defaultHadoopCoordinates = defaultHadoopCoordinates == null
+                                    ? DEFAULT_DEFAULT_HADOOP_COORDINATES
+                                    : defaultHadoopCoordinates;
   }

+  @JsonProperty
   public String getBaseDir()
   {
     return baseDir;
   }

+  @JsonProperty
   public File getBaseTaskDir()
   {
     return baseTaskDir;
   }

+  @JsonProperty
   public String getHadoopWorkingPath()
   {
     return hadoopWorkingPath;
   }

+  @JsonProperty
   public int getDefaultRowFlushBoundary()
   {
     return defaultRowFlushBoundary;
   }

+  @JsonProperty
+  public List<String> getDefaultHadoopCoordinates()
+  {
+    return defaultHadoopCoordinates;
+  }
+
   private String defaultDir(String configParameter, final String defaultVal)
   {
     if (configParameter == null) {
@@ -80,4 +103,4 @@ public class TaskConfig

     return configParameter;
   }
 }
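For orientation, a small usage sketch of the widened constructor (the argument values echo the test changes later in this commit; it assumes the TaskConfig class above is on the classpath):

    import io.druid.indexing.common.config.TaskConfig;

    public final class TaskConfigExample
    {
      public static void main(String[] args)
      {
        TaskConfig config = new TaskConfig(
            "/tmp",   // baseDir
            null,     // baseTaskDir: derived via defaultDir(baseTaskDir, "persistent/task")
            null,     // hadoopWorkingPath: derived via defaultDir(hadoopWorkingPath, "druid-indexing")
            50000,    // defaultRowFlushBoundary
            null      // defaultHadoopCoordinates: falls back to DEFAULT_DEFAULT_HADOOP_COORDINATES
        );

        // Prints [org.apache.hadoop:hadoop-client:2.3.0]
        System.out.println(config.getDefaultHadoopCoordinates());
      }
    }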
@@ -27,6 +27,7 @@ import com.google.api.client.util.Lists;
 import com.google.common.base.Joiner;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.metamx.common.logger.Logger;
 import io.druid.common.utils.JodaUtils;
@@ -65,8 +66,6 @@ public class HadoopIndexTask extends AbstractTask
     extensionsConfig = Initialization.makeStartupInjector().getInstance(ExtensionsConfig.class);
   }

-  public static String DEFAULT_HADOOP_COORDINATES = "org.apache.hadoop:hadoop-client:2.3.0";
-
   private static String getTheDataSource(HadoopIngestionSpec spec, HadoopIngestionSpec config)
   {
     if (spec != null) {
@@ -115,9 +114,14 @@ public class HadoopIndexTask extends AbstractTask
     Preconditions.checkArgument(this.spec.getTuningConfig().getWorkingPath() == null, "workingPath must be absent");
     Preconditions.checkArgument(this.spec.getIOConfig().getMetadataUpdateSpec() == null, "updaterJobSpec must be absent");

-    this.hadoopDependencyCoordinates = hadoopDependencyCoordinates == null ? Arrays.<String>asList(
-        hadoopCoordinates == null ? DEFAULT_HADOOP_COORDINATES : hadoopCoordinates
-    ) : hadoopDependencyCoordinates;
+    if (hadoopDependencyCoordinates != null) {
+      this.hadoopDependencyCoordinates = hadoopDependencyCoordinates;
+    } else if (hadoopCoordinates != null) {
+      this.hadoopDependencyCoordinates = ImmutableList.of(hadoopCoordinates);
+    } else {
+      // Will be defaulted to something at runtime, based on taskConfig.
+      this.hadoopDependencyCoordinates = null;
+    }
   }

   @Override
@@ -158,6 +162,10 @@ public class HadoopIndexTask extends AbstractTask
   @Override
   public TaskStatus run(TaskToolbox toolbox) throws Exception
   {
+    final List<String> finalHadoopDependencyCoordinates = hadoopDependencyCoordinates != null
+                                                          ? hadoopDependencyCoordinates
+                                                          : toolbox.getConfig().getDefaultHadoopCoordinates();
+
     final DefaultTeslaAether aetherClient = Initialization.getAetherClient(extensionsConfig);

     final List<URL> extensionURLs = Lists.newArrayList();
@@ -174,7 +182,7 @@ public class HadoopIndexTask extends AbstractTask
     final List<URL> driverURLs = Lists.newArrayList();
     driverURLs.addAll(nonHadoopURLs);
     // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
-    for (String hadoopDependencyCoordinate : hadoopDependencyCoordinates) {
+    for (String hadoopDependencyCoordinate : finalHadoopDependencyCoordinates) {
       final ClassLoader hadoopLoader = Initialization.getClassLoaderForCoordinates(
           aetherClient, hadoopDependencyCoordinate
       );
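In short, the Hadoop coordinates now resolve in this order: an explicit hadoopDependencyCoordinates list on the task, else a single legacy hadoopCoordinates value, else (only at run time) the TaskConfig default shown earlier. A hedged restatement of that precedence as a standalone helper, written purely for illustration (no such helper exists in Druid):

    import com.google.common.collect.ImmutableList;

    import java.util.List;

    public final class HadoopCoordinateResolutionSketch
    {
      public static List<String> resolve(
          List<String> taskHadoopDependencyCoordinates,  // from the task spec
          String taskHadoopCoordinates,                  // legacy single-coordinate field
          List<String> taskConfigDefaults                // TaskConfig.getDefaultHadoopCoordinates()
      )
      {
        if (taskHadoopDependencyCoordinates != null) {
          return taskHadoopDependencyCoordinates;
        } else if (taskHadoopCoordinates != null) {
          return ImmutableList.of(taskHadoopCoordinates);
        } else {
          return taskConfigDefaults;  // e.g. ["org.apache.hadoop:hadoop-client:2.3.0"]
        }
      }

      public static void main(String[] args)
      {
        System.out.println(resolve(null, null, ImmutableList.of("org.apache.hadoop:hadoop-client:2.3.0")));
      }
    }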
@@ -24,9 +24,9 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.io.Closeables;
 import com.metamx.common.Granularity;
-import com.metamx.common.exception.FormattedException;
+import com.metamx.common.guava.CloseQuietly;
+import com.metamx.common.parsers.ParseException;
 import com.metamx.emitter.EmittingLogger;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
@@ -44,8 +44,8 @@ import io.druid.query.QueryRunnerFactory;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.query.QueryToolChest;
 import io.druid.segment.indexing.DataSchema;
-import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.RealtimeIOConfig;
+import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.segment.realtime.FireDepartment;
 import io.druid.segment.realtime.FireDepartmentConfig;
@@ -353,7 +353,7 @@ public class RealtimeIndexTask extends AbstractTask
             nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
           }
         }
-        catch (FormattedException e) {
+        catch (ParseException e) {
           log.warn(e, "unparseable line");
           fireDepartment.getMetrics().incrementUnparseable();
         }
@@ -375,7 +375,7 @@ public class RealtimeIndexTask extends AbstractTask
       log.makeAlert(e, "Failed to finish realtime task").emit();
     }
     finally {
-      Closeables.closeQuietly(firehose);
+      CloseQuietly.close(firehose);
       toolbox.getMonitorScheduler().removeMonitor(metricsMonitor);
     }
   }
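With FormattedException gone, parse failures surface as com.metamx.common.parsers.ParseException (an unchecked exception) and the task counts them instead of aborting ingestion. A hedged sketch of that consumption pattern; the counter here is a plain field, whereas the real task calls fireDepartment.getMetrics().incrementUnparseable():

    import com.metamx.common.parsers.ParseException;

    import io.druid.data.input.Firehose;
    import io.druid.data.input.InputRow;

    public final class FirehoseLoopSketch
    {
      private long unparseable = 0;

      public void consume(Firehose firehose)
      {
        while (firehose.hasMore()) {
          try {
            final InputRow row = firehose.nextRow();
            // ... hand the row to the current sink (elided) ...
          }
          catch (ParseException e) {
            unparseable++;
          }
        }
      }
    }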
@@ -585,7 +585,7 @@ public class RemoteTaskRunner implements TaskRunner, TaskLogStreamer

     // Syncing state with Zookeeper - don't assign new tasks until the task we just assigned is actually running
     // on a worker - this avoids overflowing a worker with tasks
-    Stopwatch timeoutStopwatch = new Stopwatch();
+    Stopwatch timeoutStopwatch = Stopwatch.createUnstarted();
     timeoutStopwatch.start();
     synchronized (statusLock) {
       while (!isWorkerRunningTask(theWorker, task.getId())) {
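This one-line change is forced by the Guava upgrade in the root pom (14.0.1 to 17.0): newer Guava releases drop the public Stopwatch constructors in favour of the static factories createUnstarted() and createStarted(). A minimal sketch of the replacement API (the sleep exists only to give the stopwatch something to measure):

    import com.google.common.base.Stopwatch;

    import java.util.concurrent.TimeUnit;

    public final class StopwatchExample
    {
      public static void main(String[] args) throws InterruptedException
      {
        Stopwatch timeout = Stopwatch.createUnstarted();  // replaces "new Stopwatch()"
        timeout.start();
        Thread.sleep(50);
        System.out.println("waited " + timeout.elapsed(TimeUnit.MILLISECONDS) + " ms");
      }
    }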
@@ -29,19 +29,14 @@ import com.amazonaws.services.ec2.model.Reservation;
 import com.amazonaws.services.ec2.model.RunInstancesRequest;
 import com.amazonaws.services.ec2.model.RunInstancesResult;
 import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.metamx.emitter.EmittingLogger;
-import io.druid.guice.annotations.Json;
 import io.druid.indexing.overlord.setup.EC2NodeData;
-import io.druid.indexing.overlord.setup.GalaxyUserData;
 import io.druid.indexing.overlord.setup.WorkerSetupData;
-import org.apache.commons.codec.binary.Base64;

-import javax.annotation.Nullable;
 import java.util.List;

 /**
@@ -50,20 +45,17 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
 {
   private static final EmittingLogger log = new EmittingLogger(EC2AutoScalingStrategy.class);

-  private final ObjectMapper jsonMapper;
   private final AmazonEC2 amazonEC2Client;
   private final SimpleResourceManagementConfig config;
   private final Supplier<WorkerSetupData> workerSetupDataRef;

   @Inject
   public EC2AutoScalingStrategy(
-      @Json ObjectMapper jsonMapper,
       AmazonEC2 amazonEC2Client,
       SimpleResourceManagementConfig config,
       Supplier<WorkerSetupData> workerSetupDataRef
   )
   {
-    this.jsonMapper = jsonMapper;
     this.amazonEC2Client = amazonEC2Client;
     this.config = config;
     this.workerSetupDataRef = workerSetupDataRef;
@@ -73,15 +65,21 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
   public AutoScalingData provision()
   {
     try {
-      WorkerSetupData setupData = workerSetupDataRef.get();
-      EC2NodeData workerConfig = setupData.getNodeData();
+      final WorkerSetupData setupData = workerSetupDataRef.get();
+      final EC2NodeData workerConfig = setupData.getNodeData();
+      final String userDataBase64;

-      GalaxyUserData userData = setupData.getUserData();
-      if (config.getWorkerVersion() != null) {
-        userData = userData.withVersion(config.getWorkerVersion());
+      if (setupData.getUserData() == null) {
+        userDataBase64 = null;
+      } else {
+        if (config.getWorkerVersion() == null) {
+          userDataBase64 = setupData.getUserData().getUserDataBase64();
+        } else {
+          userDataBase64 = setupData.getUserData().withVersion(config.getWorkerVersion()).getUserDataBase64();
+        }
       }

-      RunInstancesResult result = amazonEC2Client.runInstances(
+      final RunInstancesResult result = amazonEC2Client.runInstances(
           new RunInstancesRequest(
               workerConfig.getAmiId(),
               workerConfig.getMinInstances(),
@@ -91,16 +89,10 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
               .withSecurityGroupIds(workerConfig.getSecurityGroupIds())
               .withPlacement(new Placement(setupData.getAvailabilityZone()))
               .withKeyName(workerConfig.getKeyName())
-              .withUserData(
-                  Base64.encodeBase64String(
-                      jsonMapper.writeValueAsBytes(
-                          userData
-                      )
-                  )
-              )
+              .withUserData(userDataBase64)
       );

-      List<String> instanceIds = Lists.transform(
+      final List<String> instanceIds = Lists.transform(
           result.getReservation().getInstances(),
           new Function<Instance, String>()
           {
@@ -0,0 +1,42 @@
+/*
+ * Druid - a distributed column store.
+ * Copyright (C) 2012, 2013 Metamarkets Group Inc.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package io.druid.indexing.overlord.setup;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
+/**
+ * Represents any user data that may be needed to launch EC2 instances.
+ */
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "impl", defaultImpl = GalaxyEC2UserData.class)
+@JsonSubTypes(value = {
+    @JsonSubTypes.Type(name = "galaxy", value = GalaxyEC2UserData.class),
+    @JsonSubTypes.Type(name = "string", value = StringEC2UserData.class)
+})
+public interface EC2UserData<T extends EC2UserData>
+{
+  /**
+   * Return a copy of this instance with a different worker version. If no changes are needed (possibly because the
+   * user data does not depend on the worker version) then it is OK to return "this".
+   */
+  public EC2UserData<T> withVersion(String version);
+
+  public String getUserDataBase64();
+}
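The "impl" property on this interface drives Jackson's polymorphic deserialization: "galaxy" (also the default when impl is absent) maps to GalaxyEC2UserData and "string" maps to StringEC2UserData. A hedged sketch of reading such a payload follows; it mirrors the WorkerSetupDataTest added later in this commit, and the injectable ObjectMapper is registered because GalaxyEC2UserData takes a @JacksonInject mapper (Druid itself wires this through Guice and TestUtils rather than the plain setup shown here):

    import com.fasterxml.jackson.databind.InjectableValues;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import io.druid.indexing.overlord.setup.EC2UserData;
    import io.druid.indexing.overlord.setup.StringEC2UserData;

    public final class EC2UserDataDeserializationExample
    {
      public static void main(String[] args) throws Exception
      {
        final ObjectMapper mapper = new ObjectMapper();
        mapper.setInjectableValues(
            new InjectableValues.Std().addValue(ObjectMapper.class, mapper)
        );

        final String json =
            "{\"impl\":\"string\",\"data\":\"version=:VER:\",\"versionReplacementString\":\":VER:\",\"version\":\"1\"}";

        final EC2UserData<?> userData = mapper.readValue(json, EC2UserData.class);
        System.out.println(userData instanceof StringEC2UserData);  // true
        System.out.println(userData.getUserDataBase64());           // base64 of "version=1"
      }
    }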
@@ -19,24 +19,32 @@

 package io.druid.indexing.overlord.setup;

+import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.api.client.repackaged.com.google.common.base.Throwables;
+import io.druid.guice.annotations.Json;
+import org.apache.commons.codec.binary.Base64;

 /**
 */
-public class GalaxyUserData
+public class GalaxyEC2UserData implements EC2UserData<GalaxyEC2UserData>
 {
-  public final String env;
-  public final String version;
-  public final String type;
+  private final ObjectMapper jsonMapper;
+  private final String env;
+  private final String version;
+  private final String type;

   @JsonCreator
-  public GalaxyUserData(
+  public GalaxyEC2UserData(
+      @JacksonInject @Json ObjectMapper jsonMapper,
       @JsonProperty("env") String env,
       @JsonProperty("version") String version,
       @JsonProperty("type") String type
   )
   {
+    this.jsonMapper = jsonMapper;
     this.env = env;
     this.version = version;
     this.type = type;
@@ -60,9 +68,21 @@ public class GalaxyUserData
     return type;
   }

-  public GalaxyUserData withVersion(String ver)
+  @Override
+  public GalaxyEC2UserData withVersion(String ver)
   {
-    return new GalaxyUserData(env, ver, type);
+    return new GalaxyEC2UserData(jsonMapper, env, ver, type);
+  }
+
+  @Override
+  public String getUserDataBase64()
+  {
+    try {
+      return Base64.encodeBase64String(jsonMapper.writeValueAsBytes(this));
+    }
+    catch (Exception e) {
+      throw Throwables.propagate(e);
+    }
   }

   @Override
@@ -0,0 +1,90 @@
+/*
+ * Druid - a distributed column store.
+ * Copyright (C) 2012, 2013 Metamarkets Group Inc.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package io.druid.indexing.overlord.setup;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.api.client.util.Charsets;
+import org.apache.commons.codec.binary.Base64;
+
+public class StringEC2UserData implements EC2UserData<StringEC2UserData>
+{
+  private final String data;
+  private final String versionReplacementString;
+  private final String version;
+
+  @JsonCreator
+  public StringEC2UserData(
+      @JsonProperty("data") String data,
+      @JsonProperty("versionReplacementString") String versionReplacementString,
+      @JsonProperty("version") String version
+  )
+  {
+    this.data = data;
+    this.versionReplacementString = versionReplacementString;
+    this.version = version;
+  }
+
+  @JsonProperty
+  public String getData()
+  {
+    return data;
+  }
+
+  @JsonProperty
+  public String getVersionReplacementString()
+  {
+    return versionReplacementString;
+  }
+
+  @JsonProperty
+  public String getVersion()
+  {
+    return version;
+  }
+
+  @Override
+  public StringEC2UserData withVersion(final String _version)
+  {
+    return new StringEC2UserData(data, versionReplacementString, _version);
+  }
+
+  @Override
+  public String getUserDataBase64()
+  {
+    final String finalData;
+    if (versionReplacementString != null && version != null) {
+      finalData = data.replace(versionReplacementString, version);
+    } else {
+      finalData = data;
+    }
+    return Base64.encodeBase64String(finalData.getBytes(Charsets.UTF_8));
+  }
+
+  @Override
+  public String toString()
+  {
+    return "StringEC2UserData{" +
+           "data='" + data + '\'' +
+           ", versionReplacementString='" + versionReplacementString + '\'' +
+           ", version='" + version + '\'' +
+           '}';
+  }
+}
@@ -33,7 +33,7 @@ public class WorkerSetupData
   private final int maxNumWorkers;
   private final String availabilityZone;
   private final EC2NodeData nodeData;
-  private final GalaxyUserData userData;
+  private final EC2UserData userData;

   @JsonCreator
   public WorkerSetupData(
@@ -42,7 +42,7 @@ public class WorkerSetupData
       @JsonProperty("maxNumWorkers") int maxNumWorkers,
       @JsonProperty("availabilityZone") String availabilityZone,
       @JsonProperty("nodeData") EC2NodeData nodeData,
-      @JsonProperty("userData") GalaxyUserData userData
+      @JsonProperty("userData") EC2UserData userData
   )
   {
     this.minVersion = minVersion;
@@ -84,7 +84,7 @@ public class WorkerSetupData
   }

   @JsonProperty
-  public GalaxyUserData getUserData()
+  public EC2UserData getUserData()
   {
     return userData;
   }
@@ -19,19 +19,51 @@

 package io.druid.indexing.common;

+import com.fasterxml.jackson.databind.BeanProperty;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.InjectableValues;
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Stopwatch;
 import com.metamx.common.ISE;
+import io.druid.guice.ServerModule;
+import io.druid.jackson.DefaultObjectMapper;

+import java.util.List;
 import java.util.concurrent.TimeUnit;

 /**
 */
 public class TestUtils
 {
+  public static final ObjectMapper MAPPER = new DefaultObjectMapper();
+
+  static {
+    final List<? extends Module> list = new ServerModule().getJacksonModules();
+    for (Module module : list) {
+      MAPPER.registerModule(module);
+    }
+    MAPPER.setInjectableValues(
+        new InjectableValues()
+        {
+          @Override
+          public Object findInjectableValue(
+              Object valueId, DeserializationContext ctxt, BeanProperty forProperty, Object beanInstance
+          )
+          {
+            if (valueId.equals("com.fasterxml.jackson.databind.ObjectMapper")) {
+              return TestUtils.MAPPER;
+            }
+            throw new ISE("No Injectable value found");
+          }
+        }
+    );
+  }
+
   public static boolean conditionValid(IndexingServiceCondition condition)
   {
     try {
-      Stopwatch stopwatch = new Stopwatch();
+      Stopwatch stopwatch = Stopwatch.createUnstarted();
       stopwatch.start();
       while (!condition.isValid()) {
         Thread.sleep(100);
@@ -135,7 +135,7 @@ public class TaskLifecycleTest
     mdc = newMockMDC();
     tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter()));
     tb = new TaskToolboxFactory(
-        new TaskConfig(tmp.toString(), null, null, 50000),
+        new TaskConfig(tmp.toString(), null, null, 50000, null),
         tac,
         newMockEmitter(),
        new DataSegmentPusher()
@@ -0,0 +1,62 @@
+/*
+ * Druid - a distributed column store.
+ * Copyright (C) 2012, 2013 Metamarkets Group Inc.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package io.druid.indexing.overlord;
+
+import com.google.common.base.Charsets;
+import io.druid.indexing.common.TestUtils;
+import io.druid.indexing.overlord.setup.EC2UserData;
+import io.druid.indexing.overlord.setup.GalaxyEC2UserData;
+import io.druid.indexing.overlord.setup.StringEC2UserData;
+import org.apache.commons.codec.binary.Base64;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class WorkerSetupDataTest
+{
+  @Test
+  public void testGalaxyEC2UserDataSerde() throws IOException
+  {
+    final String json = "{\"env\":\"druid\",\"version\":null,\"type\":\"typical\"}";
+    final GalaxyEC2UserData userData = (GalaxyEC2UserData) TestUtils.MAPPER.readValue(json, EC2UserData.class);
+    Assert.assertEquals("druid", userData.getEnv());
+    Assert.assertEquals("typical", userData.getType());
+    Assert.assertNull(userData.getVersion());
+    Assert.assertEquals("1234", userData.withVersion("1234").getVersion());
+  }
+
+  @Test
+  public void testStringEC2UserDataSerde() throws IOException
+  {
+    final String json = "{\"impl\":\"string\",\"data\":\"hey :ver:\",\"versionReplacementString\":\":ver:\",\"version\":\"1234\"}";
+    final StringEC2UserData userData = (StringEC2UserData) TestUtils.MAPPER.readValue(json, EC2UserData.class);
+    Assert.assertEquals("hey :ver:", userData.getData());
+    Assert.assertEquals("1234", userData.getVersion());
+    Assert.assertEquals(
+        Base64.encodeBase64String("hey 1234".getBytes(Charsets.UTF_8)),
+        userData.getUserDataBase64()
+    );
+    Assert.assertEquals(
+        Base64.encodeBase64String("hey xyz".getBytes(Charsets.UTF_8)),
+        userData.withVersion("xyz").getUserDataBase64()
+    );
+  }
+}
@@ -30,7 +30,7 @@ import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
 import com.google.common.collect.Lists;
 import io.druid.common.guava.DSuppliers;
 import io.druid.indexing.overlord.setup.EC2NodeData;
-import io.druid.indexing.overlord.setup.GalaxyUserData;
+import io.druid.indexing.overlord.setup.GalaxyEC2UserData;
 import io.druid.indexing.overlord.setup.WorkerSetupData;
 import io.druid.jackson.DefaultObjectMapper;
 import org.easymock.EasyMock;
@@ -75,7 +75,6 @@ public class EC2AutoScalingStrategyTest
         .withPrivateIpAddress(IP);

     strategy = new EC2AutoScalingStrategy(
-        new DefaultObjectMapper(),
         amazonEC2Client,
         new SimpleResourceManagementConfig().setWorkerPort(8080).setWorkerVersion(""),
         DSuppliers.of(workerSetupData)
@@ -101,7 +100,7 @@ public class EC2AutoScalingStrategyTest
             1,
             "",
             new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.<String>newArrayList(), "foo"),
-            new GalaxyUserData("env", "version", "type")
+            new GalaxyEC2UserData(new DefaultObjectMapper(), "env", "version", "type")
         )
     );

@@ -121,7 +121,7 @@ public class WorkerTaskMonitorTest
         workerCuratorCoordinator,
         new ThreadPoolTaskRunner(
             new TaskToolboxFactory(
-                new TaskConfig(tmp.toString(), null, null, 0),
+                new TaskConfig(tmp.toString(), null, null, 0, null),
                 null, null, null, null, null, null, null, null, null, null, null, new SegmentLoaderFactory(
                 new OmniSegmentLoader(
                     ImmutableMap.<String, DataSegmentPuller>of(
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
-import com.metamx.common.exception.FormattedException;
 import com.metamx.common.logger.Logger;
 import io.druid.data.input.ByteBufferInputRowParser;
 import io.druid.data.input.Firehose;
@@ -115,7 +114,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu
       }

       @Override
-      public InputRow nextRow() throws FormattedException
+      public InputRow nextRow()
       {
         final byte[] message = iter.next().message();

@@ -123,15 +122,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu
           return null;
         }

-        try {
-          return theParser.parse(ByteBuffer.wrap(message));
-        }
-        catch (Exception e) {
-          throw new FormattedException.Builder()
-              .withErrorCode(FormattedException.ErrorCode.UNPARSABLE_ROW)
-              .withMessage(String.format("Error parsing[%s], got [%s]", ByteBuffer.wrap(message), e.toString()))
-              .build();
-        }
+        return theParser.parse(ByteBuffer.wrap(message));
       }

       @Override
@@ -28,7 +28,7 @@
   <parent>
     <groupId>io.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.6.122-SNAPSHOT</version>
+    <version>0.6.129-SNAPSHOT</version>
   </parent>

   <dependencies>
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
-import com.metamx.common.exception.FormattedException;
 import com.metamx.common.logger.Logger;
 import io.druid.data.input.ByteBufferInputRowParser;
 import io.druid.data.input.Firehose;
@@ -123,7 +122,7 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu
       }

       @Override
-      public InputRow nextRow() throws FormattedException
+      public InputRow nextRow()
       {
         final Message message = iter.next().message();

@@ -134,17 +133,9 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu
         return parseMessage(message);
       }

-      public InputRow parseMessage(Message message) throws FormattedException
+      public InputRow parseMessage(Message message)
       {
-        try {
-          return theParser.parse(message.payload());
-        }
-        catch (Exception e) {
-          throw new FormattedException.Builder()
-              .withErrorCode(FormattedException.ErrorCode.UNPARSABLE_ROW)
-              .withMessage(String.format("Error parsing[%s], got [%s]", message.payload(), e.toString()))
-              .build();
-        }
+        return theParser.parse(message.payload());
       }

       @Override
pom.xml

@@ -18,20 +18,19 @@
 ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 -->

-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>io.druid</groupId>
   <artifactId>druid</artifactId>
   <packaging>pom</packaging>
-  <version>0.6.122-SNAPSHOT</version>
+  <version>0.6.129-SNAPSHOT</version>
   <name>druid</name>
   <description>druid</description>
   <scm>
     <connection>scm:git:ssh://git@github.com/metamx/druid.git</connection>
     <developerConnection>scm:git:ssh://git@github.com/metamx/druid.git</developerConnection>
     <url>http://www.github.com/metamx/druid</url>
-    <tag>druid-0.6.107-SNAPSHOT</tag>
+    <tag>druid-0.6.117-SNAPSHOT</tag>
   </scm>

   <prerequisites>
@@ -40,9 +39,9 @@

   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <metamx.java-util.version>0.25.6</metamx.java-util.version>
-    <apache.curator.version>2.4.0</apache.curator.version>
-    <druid.api.version>0.2.3</druid.api.version>
+    <metamx.java-util.version>0.26.5</metamx.java-util.version>
+    <apache.curator.version>2.5.0</apache.curator.version>
+    <druid.api.version>0.2.4</druid.api.version>
   </properties>

   <modules>
@@ -199,22 +198,22 @@
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>14.0.1</version>
+            <version>17.0</version>
         </dependency>
         <dependency>
             <groupId>com.google.inject</groupId>
             <artifactId>guice</artifactId>
-            <version>4.0-beta4</version>
+            <version>4.0-beta</version>
         </dependency>
         <dependency>
             <groupId>com.google.inject.extensions</groupId>
             <artifactId>guice-servlet</artifactId>
-            <version>4.0-beta4</version>
+            <version>4.0-beta</version>
         </dependency>
         <dependency>
             <groupId>com.google.inject.extensions</groupId>
             <artifactId>guice-multibindings</artifactId>
-            <version>4.0-beta4</version>
+            <version>4.0-beta</version>
         </dependency>
         <dependency>
             <groupId>com.ibm.icu</groupId>
@@ -562,15 +561,7 @@
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-release-plugin</artifactId>
-            <version>2.4.2</version>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.maven.scm</groupId>
-                <artifactId>maven-scm-provider-gitexe</artifactId>
-                <!-- This version is necessary for use with git version 1.8.5 and above -->
-                <version>1.8.1</version>
-              </dependency>
-            </dependencies>
+            <version>2.5</version>
           </plugin>
         </plugins>
       </pluginManagement>
@ -28,7 +28,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>io.druid</groupId>
|
<groupId>io.druid</groupId>
|
||||||
<artifactId>druid</artifactId>
|
<artifactId>druid</artifactId>
|
||||||
<version>0.6.122-SNAPSHOT</version>
|
<version>0.6.129-SNAPSHOT</version>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
||||||
<dependencies>
|
<dependencies>
|
||||||
|
@@ -28,10 +28,8 @@ import com.google.protobuf.ByteString;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import com.google.protobuf.InvalidProtocolBufferException;
-import com.metamx.common.exception.FormattedException;
 import com.metamx.common.logger.Logger;
 import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.data.input.impl.InputRowParser;
 import io.druid.data.input.impl.JSONParseSpec;
 import io.druid.data.input.impl.MapInputRowParser;
 import io.druid.data.input.impl.ParseSpec;
@@ -94,7 +92,7 @@ public class ProtoBufInputRowParser implements ByteBufferInputRowParser
   }

   @Override
-  public InputRow parse(ByteBuffer input) throws FormattedException
+  public InputRow parse(ByteBuffer input)
   {
     // We should really create a ProtoBufBasedInputRow that does not need an intermediate map but accesses
     // the DynamicMessage directly...
@@ -132,17 +132,17 @@ public class DruidDefaultSerializersModule extends SimpleModule
           public void serialize(Yielder yielder, final JsonGenerator jgen, SerializerProvider provider)
               throws IOException, JsonProcessingException
           {
-            jgen.writeStartArray();
             try {
+              jgen.writeStartArray();
               while (!yielder.isDone()) {
                 final Object o = yielder.get();
                 jgen.writeObject(o);
                 yielder = yielder.next(null);
               }
+              jgen.writeEndArray();
             } finally {
               yielder.close();
             }
-            jgen.writeEndArray();
           }
         }
     );
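The reordering above keeps the closing bracket inside the try block, so the array is completed by the same code path that opened it and the yielder is still released in finally even when writing fails. A minimal, self-contained sketch of the same pattern, using plain Jackson and an Iterator instead of Druid's Yielder (all names below are illustrative, not taken from this commit):

```java
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.Closeable;
import java.io.IOException;
import java.util.Iterator;

public class StreamingArrayWriter
{
  // Writes each element as one JSON array entry; `resource` stands in for the
  // underlying sequence (Druid's Yielder), which must be closed even on failure.
  public static <T> void writeArray(JsonGenerator jgen, Iterator<T> values, Closeable resource) throws IOException
  {
    try {
      jgen.writeStartArray();   // open the array inside try, mirroring the patched serializer
      while (values.hasNext()) {
        jgen.writeObject(values.next());
      }
      jgen.writeEndArray();     // close the array before the resource is released
    }
    finally {
      resource.close();         // always release the underlying sequence
    }
  }
}
```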
@@ -28,6 +28,7 @@ import com.metamx.common.guava.Yielders;
 import com.metamx.common.guava.YieldingAccumulator;
 import org.joda.time.DateTime;

+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;

@@ -55,16 +56,11 @@ public class BySegmentQueryRunner<T> implements QueryRunner<T>
   public Sequence<T> run(final Query<T> query, Map<String, Object> context)
   {
     if (query.getContextBySegment(false)) {

       final Sequence<T> baseSequence = base.run(query, context);
-      return new Sequence<T>()
-      {
-        @Override
-        public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
-        {
-          List<T> results = Sequences.toList(baseSequence, Lists.<T>newArrayList());
-
-          return accumulator.accumulate(
-              initValue,
+      final List<T> results = Sequences.toList(baseSequence, Lists.<T>newArrayList());
+      return Sequences.simple(
+          Arrays.asList(
               (T) new Result<BySegmentResultValueClass<T>>(
                   timestamp,
                   new BySegmentResultValueClass<T>(
@@ -73,29 +69,8 @@ public class BySegmentQueryRunner<T> implements QueryRunner<T>
                       query.getIntervals().get(0)
                   )
               )
-          );
-        }
-
-        @Override
-        public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
-        {
-          List<T> results = Sequences.toList(baseSequence, Lists.<T>newArrayList());
-
-          final OutType retVal = accumulator.accumulate(
-              initValue,
-              (T) new Result<BySegmentResultValueClass<T>>(
-                  timestamp,
-                  new BySegmentResultValueClass<T>(
-                      results,
-                      segmentIdentifier,
-                      query.getIntervals().get(0)
-                  )
-              )
-          );
-
-          return Yielders.done(retVal, null);
-        }
-      };
+          )
+      );
     }
     return base.run(query, context);
   }
@@ -692,12 +692,14 @@ public class Druids
   {
     private DataSource dataSource;
     private QuerySegmentSpec querySegmentSpec;
+    private String bound;
     private Map<String, Object> context;

     public TimeBoundaryQueryBuilder()
     {
       dataSource = null;
       querySegmentSpec = null;
+      bound = null;
       context = null;
     }

@@ -706,6 +708,7 @@ public class Druids
       return new TimeBoundaryQuery(
           dataSource,
           querySegmentSpec,
+          bound,
           context
       );
     }
@@ -715,6 +718,7 @@ public class Druids
       return new TimeBoundaryQueryBuilder()
           .dataSource(builder.dataSource)
           .intervals(builder.querySegmentSpec)
+          .bound(builder.bound)
           .context(builder.context);
     }

@@ -748,6 +752,12 @@ public class Druids
       return this;
     }

+    public TimeBoundaryQueryBuilder bound(String b)
+    {
+      bound = b;
+      return this;
+    }
+
     public TimeBoundaryQueryBuilder context(Map<String, Object> c)
     {
       context = c;
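With the new bound field, a caller can ask for only one endpoint of a datasource's time range instead of both. A hypothetical usage sketch of the builder; the factory method name, the datasource name, and the "maxTime" value are assumptions not shown in this hunk, which only adds the setter and the extra constructor argument:

```java
// Hypothetical: build a time boundary query that only reports the latest timestamp.
TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()  // assumed factory method
    .dataSource("wikipedia")                                    // assumed datasource name
    .bound("maxTime")                                           // assumed accepted value for the new field
    .build();
```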
@@ -89,7 +89,6 @@ public class GroupByParallelQueryRunner implements QueryRunner<Row>
   @Override
   public Sequence<Row> run(final Query<Row> queryParam, final Map<String, Object> context)
   {
-
     final GroupByQuery query = (GroupByQuery) queryParam;
     final Pair<IncrementalIndex, Accumulator<IncrementalIndex, Row>> indexAccumulatorPair = GroupByQueryHelper.createIndexAccumulatorPair(
         query,
@@ -19,7 +19,7 @@

 package io.druid.query;

-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.ResourceClosingSequence;
 import com.metamx.common.guava.Sequence;
 import io.druid.segment.ReferenceCountingSegment;
@@ -54,7 +54,7 @@ public class ReferenceCountingSegmentQueryRunner<T> implements QueryRunner<T>
       return new ResourceClosingSequence<T>(baseSequence, closeable);
     }
     catch (RuntimeException e) {
-      Closeables.closeQuietly(closeable);
+      CloseQuietly.close(closeable);
       throw e;
     }
   }
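This swap, repeated in several files below, tracks the Guava bump to 17.0 in the root pom: newer Guava releases removed Closeables.closeQuietly(Closeable), keeping the quiet-close helpers only for InputStream and Reader, so the code moves to java-util's CloseQuietly. A rough sketch of what that helper is assumed to do (the real class lives in com.metamx.common.guava and may differ in detail):

```java
import java.io.Closeable;
import java.io.IOException;

// Approximation of CloseQuietly.close: release a resource from a catch/finally
// block without letting a secondary failure mask the original exception.
public class CloseQuietlySketch
{
  public static void close(Closeable closeable)
  {
    if (closeable == null) {
      return;
    }
    try {
      closeable.close();
    }
    catch (IOException e) {
      // deliberately swallowed: callers invoke this on cleanup paths only
    }
  }
}
```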
@@ -26,9 +26,9 @@ import java.util.List;

 /**
  * Processing related interface
- *
+ * <p/>
  * An AggregatorFactory is an object that knows how to generate an Aggregator using a ColumnSelectorFactory.
- *
+ * <p/>
  * This is useful as an abstraction to allow Aggregator classes to be written in terms of MetricSelector objects
  * without making any assumptions about how they are pulling values out of the base data. That is, the data is
  * provided to the Aggregator through the MetricSelector object, so whatever creates that object gets to choose how
@@ -37,7 +37,9 @@ import java.util.List;
 public interface AggregatorFactory
 {
   public Aggregator factorize(ColumnSelectorFactory metricFactory);
+
   public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory);
+
   public Comparator getComparator();

   /**
@@ -48,6 +50,7 @@ public interface AggregatorFactory
    *
    * @param lhs The left hand side of the combine
    * @param rhs The right hand side of the combine
+   *
    * @return an object representing the combination of lhs and rhs, this can be a new object or a mutation of the inputs
    */
   public Object combine(Object lhs, Object rhs);
@@ -61,11 +64,19 @@ public interface AggregatorFactory
    */
   public AggregatorFactory getCombiningFactory();

+  /**
+   * Gets a list of all columns that this AggregatorFactory will scan
+   *
+   * @return AggregatorFactories for the columns to scan of the parent AggregatorFactory
+   */
+  public List<AggregatorFactory> getRequiredColumns();
+
   /**
    * A method that knows how to "deserialize" the object from whatever form it might have been put into
    * in order to transfer via JSON.
    *
    * @param object the object to deserialize
+   *
    * @return the deserialized object
    */
   public Object deserialize(Object object);
@@ -75,13 +86,17 @@ public interface AggregatorFactory
    * intermediate format than their final resultant output.
    *
    * @param object the object to be finalized
+   *
    * @return the finalized value that should be returned for the initial query
    */
   public Object finalizeComputation(Object object);

   public String getName();
+
   public List<String> requiredFields();
+
   public byte[] getCacheKey();
+
   public String getTypeName();

   /**
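The new getRequiredColumns() contract is the piece that lets a nested group-by rewrite its outer query so the intermediate index carries every raw input column the outer aggregators read; it is deliberately different from getCombiningFactory(), which operates on the already-aggregated output column. A self-contained sketch of that distinction, mirroring the LongSum implementation later in this commit (the wrapper class and field values are illustrative only):

```java
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

import java.util.Arrays;
import java.util.List;

public class RequiredColumnsExample
{
  private final String name = "revenue_sum";   // output column produced by the aggregation
  private final String fieldName = "revenue";  // raw input column that is scanned

  // Combining factory: merges partial results, so it reads the output column.
  public AggregatorFactory getCombiningFactory()
  {
    return new LongSumAggregatorFactory(name, name);
  }

  // Required columns: what the raw (or subquery) input must contain for this aggregator to run.
  public List<AggregatorFactory> getRequiredColumns()
  {
    return Arrays.<AggregatorFactory>asList(new LongSumAggregatorFactory(fieldName, fieldName));
  }
}
```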
@@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.primitives.Longs;
 import io.druid.segment.ColumnSelectorFactory;

+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;

@@ -76,6 +77,12 @@ public class CountAggregatorFactory implements AggregatorFactory
     return new LongSumAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new CountAggregatorFactory(name));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -136,12 +143,18 @@ public class CountAggregatorFactory implements AggregatorFactory
   @Override
   public boolean equals(Object o)
   {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }

     CountAggregatorFactory that = (CountAggregatorFactory) o;

-    if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (name != null ? !name.equals(that.name) : that.name != null) {
+      return false;
+    }

     return true;
   }
@@ -85,6 +85,12 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
     return new DoubleSumAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new DoubleSumAggregatorFactory(fieldName, fieldName));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -158,13 +164,21 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
   @Override
   public boolean equals(Object o)
   {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }

     DoubleSumAggregatorFactory that = (DoubleSumAggregatorFactory) o;

-    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) return false;
-    if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+      return false;
+    }
+    if (name != null ? !name.equals(that.name) : that.name != null) {
+      return false;
+    }

     return true;
   }
@@ -56,7 +56,7 @@ public class HistogramAggregatorFactory implements AggregatorFactory

     this.name = name;
     this.fieldName = fieldName;
-    this.breaksList = (breaksList == null) ? Lists.<Float>newArrayList() :breaksList;
+    this.breaksList = (breaksList == null) ? Lists.<Float>newArrayList() : breaksList;
     this.breaks = new float[this.breaksList.size()];
     for (int i = 0; i < this.breaksList.size(); ++i) {
       this.breaks[i] = this.breaksList.get(i);
@@ -100,6 +100,12 @@ public class HistogramAggregatorFactory implements AggregatorFactory
     return new HistogramAggregatorFactory(name, name, breaksList);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new HistogramAggregatorFactory(fieldName, fieldName, breaksList));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -183,15 +189,27 @@ public class HistogramAggregatorFactory implements AggregatorFactory
   @Override
   public boolean equals(Object o)
   {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }

     HistogramAggregatorFactory that = (HistogramAggregatorFactory) o;

-    if (!Arrays.equals(breaks, that.breaks)) return false;
-    if (breaksList != null ? !breaksList.equals(that.breaksList) : that.breaksList != null) return false;
-    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) return false;
-    if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (!Arrays.equals(breaks, that.breaks)) {
+      return false;
+    }
+    if (breaksList != null ? !breaksList.equals(that.breaksList) : that.breaksList != null) {
+      return false;
+    }
+    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+      return false;
+    }
+    if (name != null ? !name.equals(that.name) : that.name != null) {
+      return false;
+    }

     return true;
   }
@@ -140,6 +140,22 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
     return new JavaScriptAggregatorFactory(name, Lists.newArrayList(name), fnCombine, fnReset, fnCombine);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Lists.transform(
+        fieldNames,
+        new com.google.common.base.Function<String, AggregatorFactory>()
+        {
+          @Override
+          public AggregatorFactory apply(String input)
+          {
+            return new JavaScriptAggregatorFactory(input, fieldNames, fnAggregate, fnReset, fnCombine);
+          }
+        }
+    );
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -31,11 +31,11 @@ import java.util.Comparator;
 import java.util.List;

 /**
  */
 public class LongSumAggregatorFactory implements AggregatorFactory
 {
   private static final byte CACHE_TYPE_ID = 0x1;

   private final String fieldName;
   private final String name;

@@ -85,6 +85,12 @@ public class LongSumAggregatorFactory implements AggregatorFactory
     return new LongSumAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new LongSumAggregatorFactory(fieldName, fieldName));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -154,13 +160,21 @@ public class LongSumAggregatorFactory implements AggregatorFactory
   @Override
   public boolean equals(Object o)
   {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }

     LongSumAggregatorFactory that = (LongSumAggregatorFactory) o;

-    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) return false;
-    if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+      return false;
+    }
+    if (name != null ? !name.equals(that.name) : that.name != null) {
+      return false;
+    }

     return true;
   }
@@ -82,6 +82,12 @@ public class MaxAggregatorFactory implements AggregatorFactory
     return new MaxAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new MaxAggregatorFactory(fieldName, fieldName));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -82,6 +82,12 @@ public class MinAggregatorFactory implements AggregatorFactory
     return new MinAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new MinAggregatorFactory(fieldName, fieldName));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -65,6 +65,12 @@ public class ToLowerCaseAggregatorFactory implements AggregatorFactory
     return baseAggregatorFactory.getCombiningFactory();
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return baseAggregatorFactory.getRequiredColumns();
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -54,7 +54,7 @@ public class CardinalityAggregator implements Aggregator
       // nothing to add to hasher if size == 0, only handle size == 1 and size != 0 cases.
       if (size == 1) {
         final String value = selector.lookupName(row.get(0));
-        hasher.putString(value != null ? value : NULL_STRING);
+        hasher.putUnencodedChars(value != null ? value : NULL_STRING);
       } else if (size != 0) {
         final String[] values = new String[size];
         for (int i = 0; i < size; ++i) {
@@ -67,7 +67,7 @@ public class CardinalityAggregator implements Aggregator
           if (i != 0) {
             hasher.putChar(SEPARATOR);
           }
-          hasher.putString(values[i]);
+          hasher.putUnencodedChars(values[i]);
         }
       }
     }
@@ -79,7 +79,7 @@ public class CardinalityAggregator implements Aggregator
     for (final DimensionSelector selector : selectors) {
       for (final Integer index : selector.getRow()) {
         final String value = selector.lookupName(index);
-        collector.add(hashFn.hashString(value == null ? NULL_STRING : value).asBytes());
+        collector.add(hashFn.hashUnencodedChars(value == null ? NULL_STRING : value).asBytes());
       }
     }
   }
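These renames go hand in hand with the Guava upgrade to 17.0 earlier in the diff: the charset-less Hasher.putString(CharSequence) and HashFunction.hashString(CharSequence) variants were renamed to putUnencodedChars and hashUnencodedChars in newer Guava, while the Charset-taking overloads kept the old names. The hashed bytes stay the same; only the call sites change. A minimal sketch of the replacement call (the hash function and input value here are placeholders, not taken from this class):

```java
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

public class HashExample
{
  public static void main(String[] args)
  {
    final HashFunction hashFn = Hashing.murmur3_128();
    // Old name: hashFn.hashString("some-dimension-value").asBytes();
    // New name: hashes the UTF-16 chars directly, without a charset round-trip.
    byte[] bytes = hashFn.hashUnencodedChars("some-dimension-value").asBytes();
    System.out.println(bytes.length);
  }
}
```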
@@ -32,12 +32,14 @@ import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.Aggregators;
 import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector;
+import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
 import io.druid.segment.ColumnSelectorFactory;
 import io.druid.segment.DimensionSelector;
 import org.apache.commons.codec.binary.Base64;

 import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;

@@ -142,7 +144,23 @@ public class CardinalityAggregatorFactory implements AggregatorFactory
   @Override
   public AggregatorFactory getCombiningFactory()
   {
-    return new CardinalityAggregatorFactory(name, fieldNames, byRow);
+    return new HyperUniquesAggregatorFactory(name, name);
+  }
+
+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Lists.transform(
+        fieldNames,
+        new Function<String, AggregatorFactory>()
+        {
+          @Override
+          public AggregatorFactory apply(String input)
+          {
+            return new CardinalityAggregatorFactory(input, fieldNames, byRow);
+          }
+        }
+    );
   }

   @Override
@@ -73,12 +73,13 @@ public class HyperUniquesAggregatorFactory implements AggregatorFactory
       return Aggregators.noopAggregator();
     }

-    if (HyperLogLogCollector.class.isAssignableFrom(selector.classOfObject())) {
+    final Class classOfObject = selector.classOfObject();
+    if (classOfObject.equals(Object.class) || HyperLogLogCollector.class.isAssignableFrom(classOfObject)) {
       return new HyperUniquesAggregator(name, selector);
     }

     throw new IAE(
-        "Incompatible type for metric[%s], expected a HyperUnique, got a %s", fieldName, selector.classOfObject()
+        "Incompatible type for metric[%s], expected a HyperUnique, got a %s", fieldName, classOfObject
     );
   }

@@ -91,12 +92,13 @@ public class HyperUniquesAggregatorFactory implements AggregatorFactory
       return Aggregators.noopBufferAggregator();
     }

-    if (HyperLogLogCollector.class.isAssignableFrom(selector.classOfObject())) {
+    final Class classOfObject = selector.classOfObject();
+    if (classOfObject.equals(Object.class) || HyperLogLogCollector.class.isAssignableFrom(classOfObject)) {
       return new HyperUniquesBufferAggregator(selector);
     }

     throw new IAE(
-        "Incompatible type for metric[%s], expected a HyperUnique, got a %s", fieldName, selector.classOfObject()
+        "Incompatible type for metric[%s], expected a HyperUnique, got a %s", fieldName, classOfObject
     );
   }

@@ -131,6 +133,12 @@ public class HyperUniquesAggregatorFactory implements AggregatorFactory
     return new HyperUniquesAggregatorFactory(name, name);
   }

+  @Override
+  public List<AggregatorFactory> getRequiredColumns()
+  {
+    return Arrays.<AggregatorFactory>asList(new HyperUniquesAggregatorFactory(fieldName, fieldName));
+  }
+
   @Override
   public Object deserialize(Object object)
   {
@@ -72,7 +72,7 @@ public class GroupByQuery extends BaseQuery<Row>
   private final List<AggregatorFactory> aggregatorSpecs;
   private final List<PostAggregator> postAggregatorSpecs;

-  private final Function<Sequence<Row>, Sequence<Row>> orderByLimitFn;
+  private final Function<Sequence<Row>, Sequence<Row>> limitFn;

   @JsonCreator
   public GroupByQuery(
@@ -85,8 +85,9 @@ public class GroupByQuery extends BaseQuery<Row>
       @JsonProperty("postAggregations") List<PostAggregator> postAggregatorSpecs,
       @JsonProperty("having") HavingSpec havingSpec,
       @JsonProperty("limitSpec") LimitSpec limitSpec,
-      @JsonProperty("orderBy") LimitSpec orderBySpec,
-      @JsonProperty("context") Map<String, Object> context
+      @JsonProperty("context") Map<String, Object> context,
+      // Backwards compatible
+      @JsonProperty("orderBy") LimitSpec orderBySpec
   )
   {
     super(dataSource, querySegmentSpec, context);
@@ -129,7 +130,7 @@ public class GroupByQuery extends BaseQuery<Row>
       );
     }

-    orderByLimitFn = postProcFn;
+    limitFn = postProcFn;
   }

   /**
@@ -146,7 +147,7 @@ public class GroupByQuery extends BaseQuery<Row>
       List<PostAggregator> postAggregatorSpecs,
       HavingSpec havingSpec,
       LimitSpec orderBySpec,
-      Function<Sequence<Row>, Sequence<Row>> orderByLimitFn,
+      Function<Sequence<Row>, Sequence<Row>> limitFn,
       Map<String, Object> context
   )
   {
@@ -159,7 +160,7 @@ public class GroupByQuery extends BaseQuery<Row>
     this.postAggregatorSpecs = postAggregatorSpecs;
     this.havingSpec = havingSpec;
     this.limitSpec = orderBySpec;
-    this.orderByLimitFn = orderByLimitFn;
+    this.limitFn = limitFn;
   }

   @JsonProperty("filter")
@@ -199,7 +200,7 @@ public class GroupByQuery extends BaseQuery<Row>
   }

   @JsonProperty
-  public LimitSpec getOrderBy()
+  public LimitSpec getLimitSpec()
   {
     return limitSpec;
   }
@@ -218,7 +219,7 @@ public class GroupByQuery extends BaseQuery<Row>

   public Sequence<Row> applyLimit(Sequence<Row> results)
   {
-    return orderByLimitFn.apply(results);
+    return limitFn.apply(results);
   }

   @Override
@@ -234,7 +235,7 @@ public class GroupByQuery extends BaseQuery<Row>
         postAggregatorSpecs,
         havingSpec,
         limitSpec,
-        orderByLimitFn,
+        limitFn,
         computeOverridenContext(contextOverride)
     );
   }
@@ -252,7 +253,7 @@ public class GroupByQuery extends BaseQuery<Row>
         postAggregatorSpecs,
         havingSpec,
         limitSpec,
-        orderByLimitFn,
+        limitFn,
         getContext()
     );
   }
@@ -270,7 +271,7 @@ public class GroupByQuery extends BaseQuery<Row>
         postAggregatorSpecs,
         havingSpec,
         limitSpec,
-        orderByLimitFn,
+        limitFn,
         getContext()
     );
   }
@@ -292,11 +293,25 @@ public class GroupByQuery extends BaseQuery<Row>
     private List<OrderByColumnSpec> orderByColumnSpecs = Lists.newArrayList();
     private int limit = Integer.MAX_VALUE;

-    private Builder()
+    public Builder()
     {
     }

-    private Builder(Builder builder)
+    public Builder(GroupByQuery query)
+    {
+      dataSource = query.getDataSource();
+      querySegmentSpec = query.getQuerySegmentSpec();
+      limitSpec = query.getLimitSpec();
+      dimFilter = query.getDimFilter();
+      granularity = query.getGranularity();
+      dimensions = query.getDimensions();
+      aggregatorSpecs = query.getAggregatorSpecs();
+      postAggregatorSpecs = query.getPostAggregatorSpecs();
+      havingSpec = query.getHavingSpec();
+      context = query.getContext();
+    }
+
+    public Builder(Builder builder)
     {
       dataSource = builder.dataSource;
       querySegmentSpec = builder.querySegmentSpec;
@@ -490,7 +505,11 @@ public class GroupByQuery extends BaseQuery<Row>
     {
       final LimitSpec theLimitSpec;
       if (limitSpec == null) {
-        theLimitSpec = new DefaultLimitSpec(orderByColumnSpecs, limit);
+        if (orderByColumnSpecs.isEmpty() && limit == Integer.MAX_VALUE) {
+          theLimitSpec = new NoopLimitSpec();
+        } else {
+          theLimitSpec = new DefaultLimitSpec(orderByColumnSpecs, limit);
+        }
       } else {
         theLimitSpec = limitSpec;
       }
@@ -504,9 +523,9 @@ public class GroupByQuery extends BaseQuery<Row>
           aggregatorSpecs,
           postAggregatorSpecs,
           havingSpec,
-          null,
           theLimitSpec,
-          context
+          context,
+          null
       );
     }
   }
@@ -515,36 +534,57 @@ public class GroupByQuery extends BaseQuery<Row>
   public String toString()
   {
     return "GroupByQuery{" +
           "limitSpec=" + limitSpec +
           ", dimFilter=" + dimFilter +
           ", granularity=" + granularity +
           ", dimensions=" + dimensions +
           ", aggregatorSpecs=" + aggregatorSpecs +
           ", postAggregatorSpecs=" + postAggregatorSpecs +
-          ", orderByLimitFn=" + orderByLimitFn +
+          ", limitFn=" + limitFn +
           '}';
   }

   @Override
   public boolean equals(Object o)
   {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-    if (!super.equals(o)) return false;
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    if (!super.equals(o)) {
+      return false;
+    }

     GroupByQuery that = (GroupByQuery) o;

-    if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null)
-      return false;
-    if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) return false;
-    if (dimensions != null ? !dimensions.equals(that.dimensions) : that.dimensions != null) return false;
-    if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) return false;
-    if (havingSpec != null ? !havingSpec.equals(that.havingSpec) : that.havingSpec != null) return false;
-    if (limitSpec != null ? !limitSpec.equals(that.limitSpec) : that.limitSpec != null) return false;
-    if (orderByLimitFn != null ? !orderByLimitFn.equals(that.orderByLimitFn) : that.orderByLimitFn != null)
-      return false;
-    if (postAggregatorSpecs != null ? !postAggregatorSpecs.equals(that.postAggregatorSpecs) : that.postAggregatorSpecs != null)
-      return false;
+    if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null) {
+      return false;
+    }
+    if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) {
+      return false;
+    }
+    if (dimensions != null ? !dimensions.equals(that.dimensions) : that.dimensions != null) {
+      return false;
+    }
+    if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) {
+      return false;
+    }
+    if (havingSpec != null ? !havingSpec.equals(that.havingSpec) : that.havingSpec != null) {
+      return false;
+    }
+    if (limitSpec != null ? !limitSpec.equals(that.limitSpec) : that.limitSpec != null) {
+      return false;
+    }
+    if (limitFn != null ? !limitFn.equals(that.limitFn) : that.limitFn != null) {
+      return false;
+    }
+    if (postAggregatorSpecs != null
+        ? !postAggregatorSpecs.equals(that.postAggregatorSpecs)
+        : that.postAggregatorSpecs != null) {
+      return false;
+    }

     return true;
   }
@@ -560,7 +600,7 @@ public class GroupByQuery extends BaseQuery<Row>
     result = 31 * result + (dimensions != null ? dimensions.hashCode() : 0);
     result = 31 * result + (aggregatorSpecs != null ? aggregatorSpecs.hashCode() : 0);
     result = 31 * result + (postAggregatorSpecs != null ? postAggregatorSpecs.hashCode() : 0);
-    result = 31 * result + (orderByLimitFn != null ? orderByLimitFn.hashCode() : 0);
+    result = 31 * result + (limitFn != null ? limitFn.hashCode() : 0);
     return result;
   }
 }
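Making the Builder public and giving it a copy constructor from an existing GroupByQuery is what lets other components (notably the group-by tool chest below) derive a modified query instead of re-specifying every field. A short usage sketch; only setters visible elsewhere in this commit are used, the import paths are assumed, and the helper class itself is illustrative:

```java
import com.google.common.collect.Lists;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.groupby.GroupByQuery;

import java.util.List;

public class RewriteExample
{
  // Derive a modified query from an existing one, overriding only what has to change;
  // dimensions, filter, granularity, having and context are copied as-is.
  static GroupByQuery withColumnAggregators(GroupByQuery existingQuery, List<AggregatorFactory> columnAggs)
  {
    return new GroupByQuery.Builder(existingQuery)
        .setAggregatorSpecs(columnAggs)
        .setInterval(existingQuery.getIntervals())
        .setPostAggregatorSpecs(Lists.<PostAggregator>newArrayList())
        .build();
  }
}
```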
@@ -25,12 +25,12 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.io.Closeables;
 import com.google.common.primitives.Ints;
 import com.google.inject.Inject;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
 import com.metamx.common.guava.BaseSequence;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.FunctionalIterator;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
@@ -123,7 +123,7 @@ public class GroupByQueryEngine
               @Override
               public void cleanup(RowIterator iterFromMake)
               {
-                Closeables.closeQuietly(iterFromMake);
+                CloseQuietly.close(iterFromMake);
               }
             }
         );
@@ -135,7 +135,7 @@ public class GroupByQueryEngine
           @Override
           public void close() throws IOException
           {
-            Closeables.closeQuietly(bufferHolder);
+            CloseQuietly.close(bufferHolder);
           }
         }
     )
@@ -24,12 +24,14 @@ import com.google.common.collect.Lists;
 import com.metamx.common.ISE;
 import com.metamx.common.Pair;
 import com.metamx.common.guava.Accumulator;
+import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
 import io.druid.data.input.Rows;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.segment.incremental.IncrementalIndex;
+import io.druid.segment.incremental.IncrementalIndexSchema;

 import javax.annotation.Nullable;
 import java.util.List;
@@ -53,7 +55,7 @@ public class GroupByQueryHelper
         new Function<AggregatorFactory, AggregatorFactory>()
         {
           @Override
-          public AggregatorFactory apply(@Nullable AggregatorFactory input)
+          public AggregatorFactory apply(AggregatorFactory input)
           {
             return input.getCombiningFactory();
           }
@@ -64,7 +66,7 @@ public class GroupByQueryHelper
         new Function<DimensionSpec, String>()
         {
           @Override
-          public String apply(@Nullable DimensionSpec input)
+          public String apply(DimensionSpec input)
           {
             return input.getOutputName();
           }
@@ -83,14 +85,14 @@ public class GroupByQueryHelper
       @Override
       public IncrementalIndex accumulate(IncrementalIndex accumulated, Row in)
       {
-        if (accumulated.add(Rows.toCaseInsensitiveInputRow(in, dimensions)) > config.getMaxResults()) {
+        if (accumulated.add(Rows.toCaseInsensitiveInputRow(in, dimensions), false) > config.getMaxResults()) {
           throw new ISE("Computation exceeds maxRows limit[%s]", config.getMaxResults());
         }

         return accumulated;
       }
     };
-    return new Pair<IncrementalIndex, Accumulator<IncrementalIndex, Row>>(index, accumulator);
+    return new Pair<>(index, accumulator);
   }

 }
@@ -24,6 +24,7 @@ import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.inject.Inject;
 import com.metamx.common.Pair;
@@ -44,6 +45,7 @@ import io.druid.query.QueryToolChest;
 import io.druid.query.SubqueryQueryRunner;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.MetricManipulationFn;
+import io.druid.query.aggregation.PostAggregator;
 import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexStorageAdapter;
 import org.joda.time.Interval;
@@ -60,7 +62,10 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
   {
   };
   private static final String GROUP_BY_MERGE_KEY = "groupByMerge";
-  private static final Map<String, Object> NO_MERGE_CONTEXT = ImmutableMap.<String, Object>of(GROUP_BY_MERGE_KEY, "false");
+  private static final Map<String, Object> NO_MERGE_CONTEXT = ImmutableMap.<String, Object>of(
+      GROUP_BY_MERGE_KEY,
+      "false"
+  );
   private final Supplier<GroupByQueryConfig> configSupplier;
   private GroupByQueryEngine engine; // For running the outer query around a subquery

@@ -82,7 +87,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
       @Override
       public Sequence<Row> run(Query<Row> input, Map<String, Object> context)
       {
-        if (Boolean.valueOf((String) input.getContextValue(GROUP_BY_MERGE_KEY, "true"))) {
+        if (Boolean.valueOf(input.getContextValue(GROUP_BY_MERGE_KEY, "true"))) {
           return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner, context);
         } else {
           return runner.run(input, context);
@@ -93,33 +98,45 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>

   private Sequence<Row> mergeGroupByResults(final GroupByQuery query, QueryRunner<Row> runner, Map<String, Object> context)
   {
-    Sequence<Row> result;
-
     // If there's a subquery, merge subquery results and then apply the aggregator
-    DataSource dataSource = query.getDataSource();
+    final DataSource dataSource = query.getDataSource();
     if (dataSource instanceof QueryDataSource) {
       GroupByQuery subquery;
       try {
         subquery = (GroupByQuery) ((QueryDataSource) dataSource).getQuery();
-      } catch (ClassCastException e) {
+      }
+      catch (ClassCastException e) {
         throw new UnsupportedOperationException("Subqueries must be of type 'group by'");
       }
-      Sequence<Row> subqueryResult = mergeGroupByResults(subquery, runner, context);
-      IncrementalIndexStorageAdapter adapter
-          = new IncrementalIndexStorageAdapter(makeIncrementalIndex(subquery, subqueryResult));
-      result = engine.process(query, adapter);
+      final Sequence<Row> subqueryResult = mergeGroupByResults(subquery, runner, context);
+      final List<AggregatorFactory> aggs = Lists.newArrayList();
+      for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
+        aggs.addAll(aggregatorFactory.getRequiredColumns());
+      }
+
+      // We need the inner incremental index to have all the columns required by the outer query
+      final GroupByQuery innerQuery = new GroupByQuery.Builder(query)
+          .setAggregatorSpecs(aggs)
+          .setInterval(subquery.getIntervals())
+          .setPostAggregatorSpecs(Lists.<PostAggregator>newArrayList())
+          .build();
+
+      final GroupByQuery outerQuery = new GroupByQuery.Builder(query)
+          .setLimitSpec(query.getLimitSpec().merge(subquery.getLimitSpec()))
+          .build();
+
+      final IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(
+          makeIncrementalIndex(innerQuery, subqueryResult)
+      );
+      return outerQuery.applyLimit(engine.process(outerQuery, adapter));
     } else {
-      result = runner.run(query, context);
+      return query.applyLimit(postAggregate(query, makeIncrementalIndex(query, runner.run(query, context))));
     }
-
-    return postAggregate(query, makeIncrementalIndex(query, result));
   }

   private Sequence<Row> postAggregate(final GroupByQuery query, IncrementalIndex index)
   {
-    Sequence<Row> sequence = Sequences.map(
+    return Sequences.map(
         Sequences.simple(index.iterableWithPostAggregations(query.getPostAggregatorSpecs())),
         new Function<Row, Row>()
         {
@@ -129,13 +146,12 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
            final MapBasedRow row = (MapBasedRow) input;
            return new MapBasedRow(
                query.getGranularity()
                     .toDateTime(row.getTimestampFromEpoch()),
                row.getEvent()
            );
          }
        }
     );
-    return query.applyLimit(sequence);
   }

   private IncrementalIndex makeIncrementalIndex(GroupByQuery query, Sequence<Row> rows)
@@ -153,7 +169,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
   @Override
   public Sequence<Row> mergeSequences(Sequence<Sequence<Row>> seqOfSequences)
   {
-    return new ConcatSequence<Row>(seqOfSequences);
+    return new ConcatSequence<>(seqOfSequences);
   }

   @Override
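Put together, the nested group-by path now runs in two passes: the subquery's rows are accumulated into an incremental index whose aggregators come from the outer query's required columns, and the outer query, with its limit spec merged against the subquery's, is then run by the engine over that index before the limit is applied. A condensed restatement of that flow, pulled from the hunk above; fields such as `engine` and helpers such as `makeIncrementalIndex` belong to the surrounding class, so this fragment is not compilable on its own:

```java
// 1. The inner index must carry every raw column the outer aggregators read.
final List<AggregatorFactory> aggs = Lists.newArrayList();
for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
  aggs.addAll(aggregatorFactory.getRequiredColumns());
}
final GroupByQuery innerQuery = new GroupByQuery.Builder(query)
    .setAggregatorSpecs(aggs)
    .setInterval(subquery.getIntervals())
    .setPostAggregatorSpecs(Lists.<PostAggregator>newArrayList())
    .build();

// 2. The outer query keeps its own spec but merges limit handling with the subquery's.
final GroupByQuery outerQuery = new GroupByQuery.Builder(query)
    .setLimitSpec(query.getLimitSpec().merge(subquery.getLimitSpec()))
    .build();

// 3. Run the outer query over the index built from the subquery rows, then apply the limit.
final IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(
    makeIncrementalIndex(innerQuery, subqueryResult)
);
return outerQuery.applyLimit(engine.process(outerQuery, adapter));
```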
@@ -87,12 +87,17 @@ public class DefaultLimitSpec implements LimitSpec

     if (limit == Integer.MAX_VALUE) {
       return new SortingFn(ordering);
-    }
-    else {
+    } else {
       return new TopNFunction(ordering, limit);
     }
   }

+  @Override
+  public LimitSpec merge(LimitSpec other)
+  {
+    return this;
+  }
+
   private Ordering<Row> makeComparator(
       List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs
   )
@@ -200,12 +205,18 @@ public class DefaultLimitSpec implements LimitSpec
     @Override
     public boolean equals(Object o)
     {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }

       LimitingFn that = (LimitingFn) o;

-      if (limit != that.limit) return false;
+      if (limit != that.limit) {
+        return false;
+      }

       return true;
     }
@@ -232,12 +243,18 @@ public class DefaultLimitSpec implements LimitSpec
     @Override
     public boolean equals(Object o)
     {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }

       SortingFn sortingFn = (SortingFn) o;

-      if (ordering != null ? !ordering.equals(sortingFn.ordering) : sortingFn.ordering != null) return false;
+      if (ordering != null ? !ordering.equals(sortingFn.ordering) : sortingFn.ordering != null) {
+        return false;
+      }

       return true;
     }
@@ -273,13 +290,21 @@ public class DefaultLimitSpec implements LimitSpec
     @Override
     public boolean equals(Object o)
     {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }

       TopNFunction that = (TopNFunction) o;

-      if (limit != that.limit) return false;
-      if (sorter != null ? !sorter.equals(that.sorter) : that.sorter != null) return false;
+      if (limit != that.limit) {
+        return false;
+      }
+      if (sorter != null ? !sorter.equals(that.sorter) : that.sorter != null) {
+        return false;
+      }

       return true;
     }
@@ -296,13 +321,21 @@ public class DefaultLimitSpec implements LimitSpec
     @Override
     public boolean equals(Object o)
     {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }

       DefaultLimitSpec that = (DefaultLimitSpec) o;

-      if (limit != that.limit) return false;
-      if (columns != null ? !columns.equals(that.columns) : that.columns != null) return false;
+      if (limit != that.limit) {
+        return false;
+      }
+      if (columns != null ? !columns.equals(that.columns) : that.columns != null) {
+        return false;
+      }

       return true;
     }
@@ -38,5 +38,11 @@ import java.util.List;
 })
 public interface LimitSpec
 {
   public Function<Sequence<Row>, Sequence<Row>> build(List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs);
|
public Function<Sequence<Row>, Sequence<Row>> build(
|
||||||
|
List<DimensionSpec> dimensions,
|
||||||
|
List<AggregatorFactory> aggs,
|
||||||
|
List<PostAggregator> postAggs
|
||||||
|
);
|
||||||
|
|
||||||
|
public LimitSpec merge(LimitSpec other);
|
||||||
}
|
}
|
||||||
|
@ -41,6 +41,12 @@ public class NoopLimitSpec implements LimitSpec
|
|||||||
return Functions.identity();
|
return Functions.identity();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public LimitSpec merge(LimitSpec other)
|
||||||
|
{
|
||||||
|
return other;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String toString()
|
public String toString()
|
||||||
{
|
{
|
||||||
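The new `merge(LimitSpec other)` contract above is what lets the groupBy changes earlier in this commit combine an outer query's limit spec with a subquery's. A minimal, self-contained sketch of the resolution rule (this is not the Druid source, just the two return statements from the hunks restated in isolation):

```java
interface LimitSpec
{
  LimitSpec merge(LimitSpec other);
}

// The no-op spec defers to whatever the other level of the query asked for.
final class NoopLimitSpec implements LimitSpec
{
  @Override
  public LimitSpec merge(LimitSpec other)
  {
    return other;
  }
}

// A concrete spec keeps itself, so an explicit outer limit always takes precedence.
final class DefaultLimitSpec implements LimitSpec
{
  @Override
  public LimitSpec merge(LimitSpec other)
  {
    return this;
  }
}
```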
@@ -21,6 +21,7 @@ package io.druid.query.timeboundary;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -48,12 +49,16 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
   );
   public static final String MAX_TIME = "maxTime";
   public static final String MIN_TIME = "minTime";
 
   private static final byte CACHE_TYPE_ID = 0x0;
 
+  private final String bound;
+
   @JsonCreator
   public TimeBoundaryQuery(
       @JsonProperty("dataSource") DataSource dataSource,
       @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
+      @JsonProperty("bound") String bound,
       @JsonProperty("context") Map<String, Object> context
   )
   {
@@ -63,6 +68,8 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
         : querySegmentSpec,
         context
     );
 
+    this.bound = bound == null ? "" : bound;
   }
 
   @Override
@@ -77,12 +84,19 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
     return Query.TIME_BOUNDARY;
   }
 
+  @JsonProperty
+  public String getBound()
+  {
+    return bound;
+  }
+
   @Override
   public TimeBoundaryQuery withOverriddenContext(Map<String, Object> contextOverrides)
   {
     return new TimeBoundaryQuery(
         getDataSource(),
         getQuerySegmentSpec(),
+        bound,
         computeOverridenContext(contextOverrides)
     );
   }
@@ -93,6 +107,7 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
     return new TimeBoundaryQuery(
         getDataSource(),
         spec,
+        bound,
         getContext()
     );
   }
@@ -103,40 +118,33 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
     return new TimeBoundaryQuery(
         dataSource,
         getQuerySegmentSpec(),
+        bound,
         getContext()
     );
   }
 
   public byte[] getCacheKey()
   {
-    return ByteBuffer.allocate(1)
+    final byte[] boundBytes = bound.getBytes(Charsets.UTF_8);
+    return ByteBuffer.allocate(1 + boundBytes.length)
                      .put(CACHE_TYPE_ID)
+                     .put(boundBytes)
                      .array();
   }
 
-  @Override
-  public String toString()
-  {
-    return "TimeBoundaryQuery{" +
-           "dataSource='" + getDataSource() + '\'' +
-           ", querySegmentSpec=" + getQuerySegmentSpec() +
-           ", duration=" + getDuration() +
-           '}';
-  }
-
   public Iterable<Result<TimeBoundaryResultValue>> buildResult(DateTime timestamp, DateTime min, DateTime max)
   {
     List<Result<TimeBoundaryResultValue>> results = Lists.newArrayList();
     Map<String, Object> result = Maps.newHashMap();
 
     if (min != null) {
-      result.put(TimeBoundaryQuery.MIN_TIME, min);
+      result.put(MIN_TIME, min);
     }
     if (max != null) {
-      result.put(TimeBoundaryQuery.MAX_TIME, max);
+      result.put(MAX_TIME, max);
     }
     if (!result.isEmpty()) {
-      results.add(new Result<TimeBoundaryResultValue>(timestamp, new TimeBoundaryResultValue(result)));
+      results.add(new Result<>(timestamp, new TimeBoundaryResultValue(result)));
     }
 
     return results;
@@ -154,25 +162,74 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
       TimeBoundaryResultValue val = result.getValue();
 
       DateTime currMinTime = val.getMinTime();
-      if (currMinTime.isBefore(min)) {
+      if (currMinTime != null && currMinTime.isBefore(min)) {
         min = currMinTime;
       }
       DateTime currMaxTime = val.getMaxTime();
-      if (currMaxTime.isAfter(max)) {
+      if (currMaxTime != null && currMaxTime.isAfter(max)) {
         max = currMaxTime;
       }
     }
 
-    return Arrays.asList(
-        new Result<TimeBoundaryResultValue>(
-            min,
-            new TimeBoundaryResultValue(
-                ImmutableMap.<String, Object>of(
-                    TimeBoundaryQuery.MIN_TIME, min,
-                    TimeBoundaryQuery.MAX_TIME, max
-                )
-            )
-        )
-    );
+    final DateTime ts;
+    final DateTime minTime;
+    final DateTime maxTime;
+
+    if (bound.equalsIgnoreCase(MIN_TIME)) {
+      ts = min;
+      minTime = min;
+      maxTime = null;
+    } else if (bound.equalsIgnoreCase(MAX_TIME)) {
+      ts = max;
+      minTime = null;
+      maxTime = max;
+    } else {
+      ts = min;
+      minTime = min;
+      maxTime = max;
+    }
+
+    return buildResult(ts, minTime, maxTime);
+  }
+
+  @Override
+  public String toString()
+  {
+    return "TimeBoundaryQuery{" +
+           "dataSource='" + getDataSource() + '\'' +
+           ", querySegmentSpec=" + getQuerySegmentSpec() +
+           ", duration=" + getDuration() +
+           ", bound" + bound +
+           '}';
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    if (!super.equals(o)) {
+      return false;
+    }
+
+    TimeBoundaryQuery that = (TimeBoundaryQuery) o;
+
+    if (!bound.equals(that.bound)) {
+      return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = super.hashCode();
+    result = 31 * result + bound.hashCode();
+    return result;
   }
 }
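For reference, a hedged sketch of how a caller might construct the new bound-restricted query. The data source name, the empty context, and the use of `null` for the default segment spec are illustrative assumptions, not taken from this commit; only the constructor shape and the `MAX_TIME` constant come from the hunks above.

```java
// Ask only for the newest timestamp; the maxTime-only branch added above then skips minTime work.
TimeBoundaryQuery maxTimeQuery = new TimeBoundaryQuery(
    new TableDataSource("wikipedia"),       // assumed table name
    null,                                   // null falls back to the default interval per the constructor above
    TimeBoundaryQuery.MAX_TIME,             // the new "bound" parameter
    ImmutableMap.<String, Object>of()       // empty query context
);
```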
@@ -68,8 +68,8 @@ public class TimeBoundaryQueryQueryToolChest
       return segments;
     }
 
-    final T first = segments.get(0);
-    final T second = segments.get(segments.size() - 1);
+    final T min = segments.get(0);
+    final T max = segments.get(segments.size() - 1);
 
     return Lists.newArrayList(
         Iterables.filter(
@@ -79,8 +79,8 @@ public class TimeBoundaryQueryQueryToolChest
               @Override
               public boolean apply(T input)
               {
-                return input.getInterval().overlaps(first.getInterval()) || input.getInterval()
-                                                                                 .overlaps(second.getInterval());
+                return (min != null && input.getInterval().overlaps(min.getInterval())) ||
+                       (max != null && input.getInterval().overlaps(max.getInterval()));
               }
             }
         )
@@ -112,7 +112,7 @@ public class TimeBoundaryQueryQueryToolChest
       @Override
       public Sequence<Result<TimeBoundaryResultValue>> mergeSequences(Sequence<Sequence<Result<TimeBoundaryResultValue>>> seqOfSequences)
       {
-        return new OrderedMergeSequence<Result<TimeBoundaryResultValue>>(getOrdering(), seqOfSequences);
+        return new OrderedMergeSequence<>(getOrdering(), seqOfSequences);
       }
 
       @Override
@@ -147,9 +147,9 @@ public class TimeBoundaryQueryQueryToolChest
       public byte[] computeCacheKey(TimeBoundaryQuery query)
       {
         return ByteBuffer.allocate(2)
                          .put(TIMEBOUNDARY_QUERY)
                          .put(query.getCacheKey())
                          .array();
       }
 
       @Override
@@ -178,11 +178,11 @@ public class TimeBoundaryQueryQueryToolChest
         {
           @Override
           @SuppressWarnings("unchecked")
-          public Result<TimeBoundaryResultValue> apply(@Nullable Object input)
+          public Result<TimeBoundaryResultValue> apply(Object input)
           {
             List<Object> result = (List<Object>) input;
 
-            return new Result<TimeBoundaryResultValue>(
+            return new Result<>(
                 new DateTime(result.get(0)),
                 new TimeBoundaryResultValue(result.get(1))
             );
@@ -193,7 +193,7 @@ public class TimeBoundaryQueryQueryToolChest
       @Override
       public Sequence<Result<TimeBoundaryResultValue>> mergeSequences(Sequence<Sequence<Result<TimeBoundaryResultValue>>> seqOfSequences)
       {
-        return new MergeSequence<Result<TimeBoundaryResultValue>>(getOrdering(), seqOfSequences);
+        return new MergeSequence<>(getOrdering(), seqOfSequences);
       }
     };
   }
@@ -32,6 +32,7 @@ import io.druid.query.QueryWatcher;
 import io.druid.query.Result;
 import io.druid.segment.Segment;
 import io.druid.segment.StorageAdapter;
+import org.joda.time.DateTime;
 
 import java.util.Iterator;
 import java.util.List;
@@ -63,7 +64,7 @@ public class TimeBoundaryQueryRunnerFactory
       ExecutorService queryExecutor, Iterable<QueryRunner<Result<TimeBoundaryResultValue>>> queryRunners
   )
   {
-    return new ChainedExecutionQueryRunner<Result<TimeBoundaryResultValue>>(
+    return new ChainedExecutionQueryRunner<>(
         queryExecutor, toolChest.getOrdering(), queryWatcher, queryRunners
     );
   }
@@ -95,7 +96,7 @@ public class TimeBoundaryQueryRunnerFactory
 
     final TimeBoundaryQuery legacyQuery = (TimeBoundaryQuery) input;
 
-    return new BaseSequence<Result<TimeBoundaryResultValue>, Iterator<Result<TimeBoundaryResultValue>>>(
+    return new BaseSequence<>(
         new BaseSequence.IteratorMaker<Result<TimeBoundaryResultValue>, Iterator<Result<TimeBoundaryResultValue>>>()
         {
          @Override
@@ -107,10 +108,18 @@ public class TimeBoundaryQueryRunnerFactory
               );
             }
 
+            final DateTime minTime = legacyQuery.getBound().equalsIgnoreCase(TimeBoundaryQuery.MAX_TIME)
+                                     ? null
+                                     : adapter.getMinTime();
+            final DateTime maxTime = legacyQuery.getBound().equalsIgnoreCase(TimeBoundaryQuery.MIN_TIME)
+                                     ? null
+                                     : adapter.getMaxTime();
+
+
             return legacyQuery.buildResult(
                 adapter.getInterval().getStart(),
-                adapter.getMinTime(),
-                adapter.getMaxTime()
+                minTime,
+                maxTime
             ).iterator();
           }
 
@@ -99,6 +99,10 @@ public class TimeBoundaryResultValue
 
   private DateTime getDateTimeValue(Object val)
   {
+    if (val == null) {
+      return null;
+    }
+
     if (val instanceof DateTime) {
       return (DateTime) val;
     } else if (val instanceof String) {
@@ -25,7 +25,6 @@ import io.druid.query.QueryRunnerHelper;
 import io.druid.query.Result;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
-import io.druid.query.aggregation.PostAggregator;
 import io.druid.segment.Cursor;
 import io.druid.segment.SegmentMissingException;
 import io.druid.segment.StorageAdapter;
@@ -46,45 +45,43 @@ public class TimeseriesQueryEngine
     }
 
     return QueryRunnerHelper.makeCursorBasedQuery(
         adapter,
         query.getQuerySegmentSpec().getIntervals(),
         Filters.convertDimensionFilters(query.getDimensionsFilter()),
         query.getGranularity(),
         new Function<Cursor, Result<TimeseriesResultValue>>()
         {
           private final List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
-          private final List<PostAggregator> postAggregatorSpecs = query.getPostAggregatorSpecs();
 
           @Override
           public Result<TimeseriesResultValue> apply(Cursor cursor)
           {
             Aggregator[] aggregators = QueryRunnerHelper.makeAggregators(cursor, aggregatorSpecs);
             try {
               while (!cursor.isDone()) {
                 for (Aggregator aggregator : aggregators) {
                   aggregator.aggregate();
                 }
                 cursor.advance();
               }
 
               TimeseriesResultBuilder bob = new TimeseriesResultBuilder(cursor.getTime());
 
               for (Aggregator aggregator : aggregators) {
                 bob.addMetric(aggregator);
               }
 
               Result<TimeseriesResultValue> retVal = bob.build();
               return retVal;
             }
             finally {
               // cleanup
               for (Aggregator agg : aggregators) {
                 agg.close();
               }
             }
           }
         }
     );
   }
 }
@@ -19,8 +19,8 @@
 
 package io.druid.query.topn;
 
-import com.google.common.io.Closeables;
 import com.metamx.common.Pair;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.collections.ResourceHolder;
 import io.druid.collections.StupidPool;
 import io.druid.query.aggregation.BufferAggregator;
@@ -233,7 +233,7 @@ public class PooledTopNAlgorithm
     if (resultsBufHolder != null) {
       resultsBufHolder.get().clear();
     }
-    Closeables.closeQuietly(resultsBufHolder);
+    CloseQuietly.close(resultsBufHolder);
   }
 
   public static class PooledTopNParams extends TopNParams
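This commit swaps Guava's `Closeables.closeQuietly(...)` for the Metamx `CloseQuietly.close(...)` helper throughout (the same substitution recurs in the hunks below), apparently because the Guava method was being phased out for general `Closeable`s. The actual `com.metamx.common.guava.CloseQuietly` source is not part of this diff; a minimal helper of the same shape might look like:

```java
import java.io.Closeable;
import java.io.IOException;

public final class CloseQuietlySketch
{
  // Close a resource and swallow the IOException, for use in finally blocks where a
  // failure on close should not mask an exception already in flight.
  public static void close(Closeable closeable)
  {
    if (closeable == null) {
      return;
    }
    try {
      closeable.close();
    }
    catch (IOException e) {
      // deliberately ignored
    }
  }
}
```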
@@ -19,8 +19,8 @@
 
 package io.druid.segment;
 
-import com.google.common.io.Closeables;
 import com.metamx.collections.spatial.ImmutableRTree;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.query.filter.BitmapIndexSelector;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.DictionaryEncodedColumn;
@@ -95,7 +95,7 @@ public class ColumnSelectorBitmapIndexSelector implements BitmapIndexSelector
       return column.length();
     }
     finally {
-      Closeables.closeQuietly(column);
+      CloseQuietly.close(column);
     }
   }
 
@@ -165,15 +165,10 @@ public class IndexIO
     }
 
     final File indexFile = new File(inDir, "index.drd");
-    InputStream in = null;
     int version;
-    try {
-      in = new FileInputStream(indexFile);
+    try (InputStream in = new FileInputStream(indexFile)) {
       version = in.read();
     }
-    finally {
-      Closeables.closeQuietly(in);
-    }
     return version;
   }
 
@@ -194,8 +189,8 @@ public class IndexIO
       case 2:
       case 3:
         log.makeAlert("Attempt to load segment of version <= 3.")
           .addData("version", version)
           .emit();
        return false;
      case 4:
      case 5:
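The IndexIO hunk above is one of several in this commit (see also the CompressedFloats/CompressedLongs serializers further down) that replace manual `finally { closeQuietly(...) }` bookkeeping with try-with-resources. A minimal, self-contained illustration of the same pattern, assuming only a file whose first byte carries a version marker as `index.drd` does here:

```java
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class VersionReader
{
  // try-with-resources closes the stream on both the normal and the exceptional path,
  // so the explicit finally/closeQuietly block from the old code is no longer needed.
  public static int readVersion(File indexFile) throws IOException
  {
    try (InputStream in = new FileInputStream(indexFile)) {
      return in.read();
    }
  }
}
```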
@@ -38,6 +38,7 @@ import com.metamx.collections.spatial.RTree;
 import com.metamx.collections.spatial.split.LinearGutmanSplitStrategy;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.FunctionalIterable;
 import com.metamx.common.guava.MergeIterable;
 import com.metamx.common.guava.nary.BinaryFn;
@@ -438,9 +439,9 @@ public class IndexMerger
       serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
     }
     finally {
-      Closeables.closeQuietly(channel);
+      CloseQuietly.close(channel);
       channel = null;
-      Closeables.closeQuietly(fileOutputStream);
+      CloseQuietly.close(fileOutputStream);
       fileOutputStream = null;
     }
     IndexIO.checkFileSize(indexFile);
@@ -881,7 +882,7 @@ public class IndexMerger
       );
     }
     finally {
-      Closeables.closeQuietly(channel);
+      CloseQuietly.close(channel);
       channel = null;
     }
     IndexIO.checkFileSize(indexFile);
@@ -20,7 +20,7 @@
 package io.druid.segment;
 
 import com.google.common.collect.Maps;
-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.segment.data.ConciseCompressedIndexedInts;
 import io.druid.segment.data.Indexed;
 import io.druid.segment.data.IndexedFloats;
@@ -118,9 +118,9 @@ public class MMappedIndexAdapter implements IndexableAdapter
       {
         final boolean hasNext = currRow < numRows;
         if (!hasNext && !done) {
-          Closeables.closeQuietly(timestamps);
+          CloseQuietly.close(timestamps);
           for (IndexedFloats floatMetric : floatMetrics) {
-            Closeables.closeQuietly(floatMetric);
+            CloseQuietly.close(floatMetric);
           }
           done = true;
         }
@@ -20,11 +20,11 @@
 package io.druid.segment;
 
 import com.google.common.io.ByteStreams;
-import com.google.common.io.Closeables;
 import com.google.common.io.InputSupplier;
 import com.google.common.io.OutputSupplier;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.common.utils.SerializerUtils;
 import io.druid.segment.data.CompressedFloatsIndexedSupplier;
 import io.druid.segment.data.CompressedFloatsSupplierSerializer;
@@ -84,8 +84,8 @@ public class MetricHolder
       ByteStreams.copy(in, out);
     }
     finally {
-      Closeables.closeQuietly(out);
-      Closeables.closeQuietly(in);
+      CloseQuietly.close(out);
+      CloseQuietly.close(in);
     }
   }
 
@@ -22,8 +22,8 @@ package io.druid.segment;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.io.Closeables;
 import com.metamx.common.ISE;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.logger.Logger;
 import io.druid.segment.column.BitmapIndex;
 import io.druid.segment.column.Column;
@@ -208,10 +208,10 @@ public class QueryableIndexIndexableAdapter implements IndexableAdapter
       {
         final boolean hasNext = currRow < numRows;
         if (!hasNext && !done) {
-          Closeables.closeQuietly(timestamps);
+          CloseQuietly.close(timestamps);
           for (Object metric : metrics) {
            if (metric instanceof Closeable) {
-              Closeables.closeQuietly((Closeable) metric);
+              CloseQuietly.close((Closeable) metric);
            }
          }
          done = true;
@@ -23,7 +23,7 @@ import com.google.common.base.Function;
 import com.google.common.base.Predicates;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
 import io.druid.granularity.QueryGranularity;
@@ -109,7 +109,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
       return new DateTime(column.getLongSingleValueRow(0));
     }
     finally {
-      Closeables.closeQuietly(column);
+      CloseQuietly.close(column);
     }
   }
 
@@ -122,7 +122,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
       return new DateTime(column.getLongSingleValueRow(column.length() - 1));
     }
     finally {
-      Closeables.closeQuietly(column);
+      CloseQuietly.close(column);
     }
   }
 
@@ -535,16 +535,16 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
           @Override
           public void close() throws IOException
           {
-            Closeables.closeQuietly(timestamps);
+            CloseQuietly.close(timestamps);
             for (GenericColumn column : genericColumnCache.values()) {
-              Closeables.closeQuietly(column);
+              CloseQuietly.close(column);
             }
             for (ComplexColumn complexColumn : complexColumnCache.values()) {
-              Closeables.closeQuietly(complexColumn);
+              CloseQuietly.close(complexColumn);
             }
             for (Object column : objectColumnCache.values()) {
              if(column instanceof Closeable) {
-                Closeables.closeQuietly((Closeable) column);
+                CloseQuietly.close((Closeable) column);
              }
            }
          }
@@ -962,16 +962,16 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
           @Override
           public void close() throws IOException
           {
-            Closeables.closeQuietly(timestamps);
+            CloseQuietly.close(timestamps);
             for (GenericColumn column : genericColumnCache.values()) {
-              Closeables.closeQuietly(column);
+              CloseQuietly.close(column);
             }
             for (ComplexColumn complexColumn : complexColumnCache.values()) {
-              Closeables.closeQuietly(complexColumn);
+              CloseQuietly.close(complexColumn);
            }
             for (Object column : objectColumnCache.values()) {
              if (column instanceof Closeable) {
-                Closeables.closeQuietly((Closeable) column);
+                CloseQuietly.close((Closeable) column);
              }
            }
          }
@@ -20,7 +20,7 @@
 package io.druid.segment.column;
 
 import com.google.common.base.Supplier;
-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 
 /**
 */
@@ -68,7 +68,7 @@ class SimpleColumn implements Column
       return column.length();
     }
     finally {
-      Closeables.closeQuietly(column);
+      CloseQuietly.close(column);
     }
   }
 
@@ -25,6 +25,7 @@ import com.google.common.io.Closeables;
 import com.google.common.primitives.Floats;
 import com.google.common.primitives.Ints;
 import com.metamx.common.IAE;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.collections.ResourceHolder;
 import io.druid.collections.StupidResourceHolder;
 
@@ -123,7 +124,7 @@ public class CompressedFloatsIndexedSupplier implements Supplier<IndexedFloats>
 
   private void loadBuffer(int bufferNum)
   {
-    Closeables.closeQuietly(holder);
+    CloseQuietly.close(holder);
     holder = baseFloatBuffers.get(bufferNum);
     buffer = holder.get();
     currIndex = bufferNum;
@@ -106,17 +106,11 @@ public class CompressedFloatsSupplierSerializer
 
     flattener.close();
 
-    OutputStream out = null;
-    try {
-      out = consolidatedOut.getOutput();
-
+    try (OutputStream out = consolidatedOut.getOutput()) {
       out.write(CompressedFloatsIndexedSupplier.version);
       out.write(Ints.toByteArray(numInserted));
       out.write(Ints.toByteArray(sizePer));
       ByteStreams.copy(flattener.combineStreams(), out);
     }
-    finally {
-      Closeables.closeQuietly(out);
-    }
   }
 }
@@ -25,6 +25,7 @@ import com.google.common.io.Closeables;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 import com.metamx.common.IAE;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.collections.ResourceHolder;
 import io.druid.collections.StupidResourceHolder;
 
@@ -122,7 +123,7 @@ public class CompressedLongsIndexedSupplier implements Supplier<IndexedLongs>
 
   private void loadBuffer(int bufferNum)
   {
-    Closeables.closeQuietly(holder);
+    CloseQuietly.close(holder);
     holder = baseLongBuffers.get(bufferNum);
     buffer = holder.get();
     currIndex = bufferNum;
@@ -100,17 +100,11 @@ public class CompressedLongsSupplierSerializer
 
     flattener.close();
 
-    OutputStream out = null;
-    try {
-      out = consolidatedOut.getOutput();
-
+    try (OutputStream out = consolidatedOut.getOutput()) {
       out.write(CompressedLongsIndexedSupplier.version);
       out.write(Ints.toByteArray(numInserted));
       out.write(Ints.toByteArray(sizePer));
       ByteStreams.copy(flattener.combineStreams(), out);
     }
-    finally {
-      Closeables.closeQuietly(out);
-    }
   }
 }
@@ -20,7 +20,7 @@
 package io.druid.segment.data;
 
 import com.google.common.base.Throwables;
-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import com.ning.compress.lzf.ChunkEncoder;
 import com.ning.compress.lzf.LZFChunk;
 import com.ning.compress.lzf.LZFDecoder;
@@ -74,7 +74,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg
     buf.put(outputBytes, 0, numDecompressedBytes);
     buf.flip();
 
-    Closeables.closeQuietly(outputBytesHolder);
+    CloseQuietly.close(outputBytesHolder);
 
     return new ResourceHolder<T>()
     {
@@ -105,7 +105,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg
 
     final ResourceHolder<ChunkEncoder> encoder = CompressedPools.getChunkEncoder();
     LZFChunk chunk = encoder.get().encodeChunk(buf.array(), 0, buf.array().length);
-    Closeables.closeQuietly(encoder);
+    CloseQuietly.close(encoder);
 
     return chunk.getData();
   }
@@ -21,9 +21,9 @@ package io.druid.segment.data;
 
 import com.google.common.base.Charsets;
 import com.google.common.collect.Ordering;
-import com.google.common.io.Closeables;
 import com.google.common.primitives.Ints;
 import com.metamx.common.IAE;
+import com.metamx.common.guava.CloseQuietly;
 
 import java.io.ByteArrayOutputStream;
 import java.io.Closeable;
@@ -73,14 +73,14 @@ public class GenericIndexed<T> implements Indexed<T>
           allowReverseLookup = false;
         }
         if (prevVal instanceof Closeable) {
-          Closeables.closeQuietly((Closeable) prevVal);
+          CloseQuietly.close((Closeable) prevVal);
         }
 
         prevVal = next;
         ++count;
       }
       if (prevVal instanceof Closeable) {
-        Closeables.closeQuietly((Closeable) prevVal);
+        CloseQuietly.close((Closeable) prevVal);
       }
 
       ByteArrayOutputStream headerBytes = new ByteArrayOutputStream(4 + (count * 4));
@@ -98,7 +98,7 @@ public class GenericIndexed<T> implements Indexed<T>
         valueBytes.write(bytes);
 
         if (object instanceof Closeable) {
-          Closeables.closeQuietly((Closeable) object);
+          CloseQuietly.close((Closeable) object);
        }
      }
    }
@@ -22,7 +22,7 @@ package io.druid.segment.data;
 import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.collections.ResourceHolder;
 import io.druid.collections.StupidResourceHolder;
 
@@ -153,7 +153,7 @@ public class InMemoryCompressedFloats implements IndexedFloats
   private void loadBuffer(int bufferNum)
   {
     loadBuffer = null;
-    Closeables.closeQuietly(holder);
+    CloseQuietly.close(holder);
     final byte[] compressedBytes = compressedBuffers.get(bufferNum);
     holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length);
     loadBuffer = holder.get();
@@ -191,6 +191,6 @@ public class InMemoryCompressedFloats implements IndexedFloats
   @Override
   public void close() throws IOException
   {
-    Closeables.closeQuietly(holder);
+    CloseQuietly.close(holder);
   }
 }
@@ -23,6 +23,7 @@ import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.io.Closeables;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.collections.ResourceHolder;
 import io.druid.collections.StupidResourceHolder;
 
@@ -163,7 +164,7 @@ public class InMemoryCompressedLongs implements IndexedLongs
   private void loadBuffer(int bufferNum)
   {
     loadBuffer = null;
-    Closeables.closeQuietly(holder);
+    CloseQuietly.close(holder);
     final byte[] compressedBytes = compressedBuffers.get(bufferNum);
     holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length);
     loadBuffer = holder.get();
@@ -46,6 +46,7 @@ import io.druid.segment.DimensionSelector;
 import io.druid.segment.FloatColumnSelector;
 import io.druid.segment.ObjectColumnSelector;
 import io.druid.segment.TimestampColumnSelector;
+import io.druid.segment.data.IndexedInts;
 import io.druid.segment.serde.ComplexMetricExtractor;
 import io.druid.segment.serde.ComplexMetricSerde;
 import io.druid.segment.serde.ComplexMetrics;
@@ -53,6 +54,7 @@ import org.joda.time.DateTime;
 import org.joda.time.Interval;
 
 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
@@ -133,17 +135,29 @@ public class IncrementalIndex implements Iterable<Row>
     );
   }
 
+  public int add(InputRow row)
+  {
+    // this is an ugly workaround to call ComplexMetricExtractor.extractValue at ingestion time
+    return add(row, true);
+  }
+
   /**
    * Adds a new row. The row might correspond with another row that already exists, in which case this will
    * update that row instead of inserting a new one.
    * <p/>
-   * This is *not* thread-safe. Calls to add() should always happen on the same thread.
+   *
+   * Calls to add() are thread safe.
+   *
+   * Setting deserializeComplexMetrics to false is necessary for intermediate aggregation such as groupBy that
+   * should not deserialize input columns using ComplexMetricSerde for aggregators that return complex metrics.
    *
    * @param row the row of data to add
+   * @param deserializeComplexMetrics flag whether or not to call ComplexMetricExtractor.extractValue() on the input
+   *                                  value for aggregators that return metrics other than float.
   *
    * @return the number of rows in the data set after adding the InputRow
    */
-  public int add(InputRow row)
+  public int add(InputRow row, final boolean deserializeComplexMetrics)
   {
     row = spatialDimensionRowFormatter.formatRow(row);
 
@@ -186,7 +200,7 @@ public class IncrementalIndex implements Iterable<Row>
       dims = newDims;
     }
 
-    TimeAndDims key = new TimeAndDims(Math.max(gran.truncate(row.getTimestampFromEpoch()), minTimestamp), dims);
+    final TimeAndDims key = new TimeAndDims(Math.max(gran.truncate(row.getTimestampFromEpoch()), minTimestamp), dims);
 
     Aggregator[] aggs = facts.get(key);
     if (aggs == null) {
@@ -231,54 +245,108 @@ public class IncrementalIndex implements Iterable<Row>
         final String typeName = agg.getTypeName();
         final String columnName = column.toLowerCase();
 
-        if (typeName.equals("float")) {
-          return new ObjectColumnSelector<Float>()
-          {
-            @Override
-            public Class classOfObject()
-            {
-              return Float.TYPE;
-            }
-
-            @Override
-            public Float get()
-            {
-              return in.getFloatMetric(columnName);
-            }
-          };
-        }
-
-        final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
-
-        if (serde == null) {
-          throw new ISE("Don't know how to handle type[%s]", typeName);
-        }
-
-        final ComplexMetricExtractor extractor = serde.getExtractor();
-
-        return new ObjectColumnSelector()
-        {
-          @Override
-          public Class classOfObject()
-          {
-            return extractor.extractedClass();
-          }
-
-          @Override
-          public Object get()
-          {
-            return extractor.extractValue(in, columnName);
-          }
-        };
+        final ObjectColumnSelector<Object> rawColumnSelector = new ObjectColumnSelector<Object>()
+        {
+          @Override
+          public Class classOfObject()
+          {
+            return Object.class;
+          }
+
+          @Override
+          public Object get()
+          {
+            return in.getRaw(columnName);
+          }
+        };
+
+        if(!deserializeComplexMetrics) {
+          return rawColumnSelector;
+        } else {
+          if (typeName.equals("float")) {
+            return rawColumnSelector;
+          }
+
+          final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
+          if (serde == null) {
+            throw new ISE("Don't know how to handle type[%s]", typeName);
+          }
+
+          final ComplexMetricExtractor extractor = serde.getExtractor();
+          return new ObjectColumnSelector()
+          {
+            @Override
+            public Class classOfObject()
+            {
+              return extractor.extractedClass();
+            }
+
+            @Override
+            public Object get()
+            {
+              return extractor.extractValue(in, columnName);
+            }
+          };
+        }
       }
 
       @Override
-      public DimensionSelector makeDimensionSelector(String dimension)
+      public DimensionSelector makeDimensionSelector(final String dimension)
       {
-        // we should implement this, but this is going to be rewritten soon anyways
-        throw new UnsupportedOperationException(
-            "Incremental index aggregation does not support dimension selectors"
-        );
+        final String dimensionName = dimension.toLowerCase();
+        return new DimensionSelector()
+        {
+          @Override
+          public IndexedInts getRow()
+          {
+            final List<String> dimensionValues = in.getDimension(dimensionName);
+            final ArrayList<Integer> vals = Lists.newArrayList();
+            if (dimensionValues != null) {
+              for (int i = 0; i < dimensionValues.size(); ++i) {
+                vals.add(i);
+              }
+            }
+
+            return new IndexedInts()
+            {
+              @Override
+              public int size()
+              {
+                return vals.size();
+              }
+
+              @Override
+              public int get(int index)
+              {
+                return vals.get(index);
+              }
+
+              @Override
+              public Iterator<Integer> iterator()
+              {
+                return vals.iterator();
+              }
+            };
+          }
+
+          @Override
+          public int getValueCardinality()
+          {
+            throw new UnsupportedOperationException("value cardinality is unknown in incremental index");
+          }
+
+          @Override
+          public String lookupName(int id)
+          {
            return in.getDimension(dimensionName).get(id);
+          }
+
+          @Override
+          public int lookupId(String name)
+          {
+            return in.getDimension(dimensionName).indexOf(name);
+          }
+        };
       }
     }
 
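A hedged sketch of how the two `add` entry points above divide the work. The construction of the `IncrementalIndex` and the `InputRow` is elided; only the flag semantics come from the javadoc in this hunk.

```java
// Ingestion path: extract complex metrics eagerly (same behaviour as the old single-argument add()).
static int addForIngestion(IncrementalIndex index, InputRow row)
{
  return index.add(row);   // delegates to index.add(row, true)
}

// Intermediate aggregation (e.g. groupBy merging): keep the raw column values and let the
// aggregators handle them, skipping ComplexMetricExtractor.extractValue().
static int addForIntermediateAggregation(IncrementalIndex index, InputRow row)
{
  return index.add(row, false);
}
```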
@@ -23,12 +23,14 @@ import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.base.Predicate;
 import com.google.common.base.Splitter;
+import com.google.common.base.Throwables;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Floats;
 import com.metamx.common.ISE;
+import com.metamx.common.parsers.ParseException;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.SpatialDimensionSchema;
 
@@ -134,14 +136,20 @@ public class SpatialDimensionRowFormatter
       }
 
       @Override
-      public Object getRaw(String dimension) {
+      public Object getRaw(String dimension)
+      {
         return row.getRaw(dimension);
       }
 
       @Override
       public float getFloatMetric(String metric)
       {
-        return row.getFloatMetric(metric);
+        try {
+          return row.getFloatMetric(metric);
+        }
+        catch (ParseException e) {
+          throw Throwables.propagate(e);
+        }
       }
 
       @Override
@@ -67,6 +67,7 @@ public class ChainedExecutionQueryRunnerTest
     );
 
     final CountDownLatch queriesStarted = new CountDownLatch(2);
+    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
     final CountDownLatch queryIsRegistered = new CountDownLatch(1);
 
     Capture<ListenableFuture> capturedFuture = new Capture<>();
@@ -88,9 +89,9 @@ public class ChainedExecutionQueryRunnerTest
 
     EasyMock.replay(watcher);
 
-    DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted);
-    DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted);
-    DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted);
+    DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
+    DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
+    DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
     ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
         exec,
         Ordering.<Integer>natural(),
@@ -138,11 +139,14 @@ public class ChainedExecutionQueryRunnerTest
       Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
       cause = (QueryInterruptedException)e.getCause();
     }
+    Assert.assertTrue(queriesInterrupted.await(500, TimeUnit.MILLISECONDS));
     Assert.assertNotNull(cause);
     Assert.assertTrue(future.isCancelled());
     Assert.assertTrue(runner1.hasStarted);
     Assert.assertTrue(runner2.hasStarted);
-    Assert.assertFalse(runner3.hasStarted);
+    Assert.assertTrue(runner1.interrupted);
+    Assert.assertTrue(runner2.interrupted);
+    Assert.assertTrue(!runner3.hasStarted || runner3.interrupted);
     Assert.assertFalse(runner1.hasCompleted);
     Assert.assertFalse(runner2.hasCompleted);
     Assert.assertFalse(runner3.hasCompleted);
@@ -171,6 +175,7 @@ public class ChainedExecutionQueryRunnerTest
     );
 
     final CountDownLatch queriesStarted = new CountDownLatch(2);
+    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
     final CountDownLatch queryIsRegistered = new CountDownLatch(1);
 
     Capture<ListenableFuture> capturedFuture = new Capture<>();
@@ -192,9 +197,9 @@ public class ChainedExecutionQueryRunnerTest
 
     EasyMock.replay(watcher);
 
-    DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted);
-    DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted);
-    DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted);
+    DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
+    DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
+    DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
     ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
         exec,
         Ordering.<Integer>natural(),
@@ -211,7 +216,7 @@ public class ChainedExecutionQueryRunnerTest
         .dataSource("test")
         .intervals("2014/2015")
         .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
-        .context(ImmutableMap.<String, Object>of("timeout", (100), "queryId", "test"))
+        .context(ImmutableMap.<String, Object>of("timeout", 100, "queryId", "test"))
         .build(),
         context
     );
@@ -231,10 +236,10 @@ public class ChainedExecutionQueryRunnerTest
     Assert.assertTrue(queryIsRegistered.await(1, TimeUnit.SECONDS));
     Assert.assertTrue(queriesStarted.await(1, TimeUnit.SECONDS));
 
-    // cancel the query
     Assert.assertTrue(capturedFuture.hasCaptured());
     ListenableFuture future = capturedFuture.getValue();
 
+    // wait for query to time out
     QueryInterruptedException cause = null;
     try {
       resultFuture.get();
@@ -243,11 +248,14 @@ public class ChainedExecutionQueryRunnerTest
       Assert.assertEquals("Query timeout", e.getCause().getMessage());
       cause = (QueryInterruptedException)e.getCause();
     }
+    Assert.assertTrue(queriesInterrupted.await(500, TimeUnit.MILLISECONDS));
     Assert.assertNotNull(cause);
     Assert.assertTrue(future.isCancelled());
     Assert.assertTrue(runner1.hasStarted);
     Assert.assertTrue(runner2.hasStarted);
-    Assert.assertFalse(runner3.hasStarted);
+    Assert.assertTrue(runner1.interrupted);
+    Assert.assertTrue(runner2.interrupted);
+    Assert.assertTrue(!runner3.hasStarted || runner3.interrupted);
     Assert.assertFalse(runner1.hasCompleted);
     Assert.assertFalse(runner2.hasCompleted);
     Assert.assertFalse(runner3.hasCompleted);
@@ -257,21 +265,27 @@ public class ChainedExecutionQueryRunnerTest
 
   private static class DyingQueryRunner implements QueryRunner<Integer>
   {
-    private final CountDownLatch latch;
+    private final CountDownLatch start;
+    private final CountDownLatch stop;
+
     private boolean hasStarted = false;
     private boolean hasCompleted = false;
+    private boolean interrupted = false;
 
-    public DyingQueryRunner(CountDownLatch latch)
+    public DyingQueryRunner(CountDownLatch start, CountDownLatch stop)
     {
-      this.latch = latch;
+      this.start = start;
+      this.stop = stop;
     }
 
     @Override
     public Sequence<Integer> run(Query<Integer> query, Map<String, Object> context)
     {
       hasStarted = true;
-      latch.countDown();
+      start.countDown();
       if (Thread.interrupted()) {
+        interrupted = true;
+        stop.countDown();
         throw new QueryInterruptedException("I got killed");
       }
 
@@ -280,10 +294,13 @@ public class ChainedExecutionQueryRunnerTest
         Thread.sleep(500);
       }
       catch (InterruptedException e) {
+        interrupted = true;
+        stop.countDown();
         throw new QueryInterruptedException("I got killed");
       }
 
       hasCompleted = true;
+      stop.countDown();
       return Sequences.simple(Lists.newArrayList(123));
     }
   }
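The DyingQueryRunner changes above add a second latch so the test only asserts after each runner has actually observed its interrupt, instead of assuming the cancel already landed. A self-contained sketch of that start/stop latch handshake in plain java.util.concurrent (the class name, the 60-second sleep, and the 500 ms wait are illustrative, not taken from the commit):

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class LatchHandshakeExample
{
  public static void main(String[] args) throws InterruptedException
  {
    final CountDownLatch started = new CountDownLatch(1);
    final CountDownLatch interrupted = new CountDownLatch(1);

    ExecutorService exec = Executors.newSingleThreadExecutor();
    exec.submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            started.countDown();           // signal: the task is running
            try {
              Thread.sleep(60000);         // stand-in for a long-running query
            }
            catch (InterruptedException e) {
              interrupted.countDown();     // signal: the interrupt was observed
            }
          }
        }
    );

    started.await();       // cancel only once the task has really started
    exec.shutdownNow();    // delivers the interrupt to the worker thread

    // Mirrors queriesInterrupted.await(500, TimeUnit.MILLISECONDS) in the test.
    boolean observed = interrupted.await(500, TimeUnit.MILLISECONDS);
    System.out.println("interrupt observed: " + observed);
  }
}
```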
@@ -28,6 +28,7 @@ import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.JavaScriptAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
+import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
 import io.druid.query.aggregation.post.ArithmeticPostAggregator;
@@ -110,6 +111,11 @@ public class QueryRunnerTestHelper
       "uniques",
       "quality_uniques"
   );
+  public static final CardinalityAggregatorFactory qualityCardinality = new CardinalityAggregatorFactory(
+      "cardinality",
+      Arrays.asList("quality"),
+      false
+  );
   public static final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L, null);
   public static final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
   public static final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
@@ -42,8 +42,13 @@ import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.QueryToolChest;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
+import io.druid.query.aggregation.JavaScriptAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
 import io.druid.query.aggregation.MaxAggregatorFactory;
+import io.druid.query.aggregation.PostAggregator;
+import io.druid.query.aggregation.post.ArithmeticPostAggregator;
+import io.druid.query.aggregation.post.ConstantPostAggregator;
+import io.druid.query.aggregation.post.FieldAccessPostAggregator;
 import io.druid.query.dimension.DefaultDimensionSpec;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.dimension.ExtractionDimensionSpec;
@@ -52,6 +57,7 @@ import io.druid.query.filter.JavaScriptDimFilter;
 import io.druid.query.filter.RegexDimFilter;
 import io.druid.query.groupby.having.EqualToHavingSpec;
 import io.druid.query.groupby.having.GreaterThanHavingSpec;
+import io.druid.query.groupby.having.HavingSpec;
 import io.druid.query.groupby.having.OrHavingSpec;
 import io.druid.query.groupby.orderby.DefaultLimitSpec;
 import io.druid.query.groupby.orderby.LimitSpec;
@@ -212,6 +218,36 @@ public class GroupByQueryRunnerTest
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }
 
+  @Test
+  public void testGroupByWithCardinality()
+  {
+    GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                QueryRunnerTestHelper.rowsCount,
+                QueryRunnerTestHelper.qualityCardinality
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.allGran)
+        .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow(
+            "2011-04-01",
+            "rows",
+            26L,
+            "cardinality",
+            QueryRunnerTestHelper.UNIQUES_9
+        )
+    );
+
+    Iterable<Row> results = runQuery(query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
   @Test
   public void testGroupByWithDimExtractionFn()
   {
@@ -1037,6 +1073,278 @@ public class GroupByQueryRunnerTest
     Assert.assertFalse(results.iterator().hasNext());
   }
 
+  @Test
+  public void testSubqueryWithPostAggregators()
+  {
+    final GroupByQuery subquery = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setDimFilter(new JavaScriptDimFilter("quality", "function(dim){ return true; }"))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx_subagg", "index")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx_subpostagg", "+", Arrays.<PostAggregator>asList(
+                        new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
+                        new ConstantPostAggregator("thousand", 1000, 1000)
+                    )
+                )
+
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    final GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(subquery)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("alias", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                new LongSumAggregatorFactory("rows", "rows"),
+                new LongSumAggregatorFactory("idx", "idx_subpostagg")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx", "+", Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_agg", "idx"),
+                        new ConstantPostAggregator("ten_thousand", 10000, 10000)
+                    )
+                )
+
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 11135.0),
+        createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 11118.0),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 11158.0),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 11120.0),
+        createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 13870.0),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 11121.0),
+        createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 13900.0),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 11078.0),
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 11119.0),
+
+        createExpectedRow("2011-04-02", "alias", "automotive", "rows", 1L, "idx", 11147.0),
+        createExpectedRow("2011-04-02", "alias", "business", "rows", 1L, "idx", 11112.0),
+        createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 11166.0),
+        createExpectedRow("2011-04-02", "alias", "health", "rows", 1L, "idx", 11113.0),
+        createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 13447.0),
+        createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 11114.0),
+        createExpectedRow("2011-04-02", "alias", "premium", "rows", 3L, "idx", 13505.0),
+        createExpectedRow("2011-04-02", "alias", "technology", "rows", 1L, "idx", 11097.0),
+        createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 11126.0)
+    );
+
+    // Subqueries are handled by the ToolChest
+    Iterable<Row> results = runQuery(query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
+  @Test
+  public void testSubqueryWithPostAggregatorsAndHaving()
+  {
+    final GroupByQuery subquery = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setDimFilter(new JavaScriptDimFilter("quality", "function(dim){ return true; }"))
+        .setAggregatorSpecs(
+            Arrays.asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx_subagg", "index")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx_subpostagg",
+                    "+",
+                    Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
+                        new ConstantPostAggregator("thousand", 1000, 1000)
+                    )
+                )
+
+            )
+        )
+        .setHavingSpec(
+            new HavingSpec()
+            {
+              @Override
+              public boolean eval(Row row)
+              {
+                return (row.getFloatMetric("idx_subpostagg") < 3800);
+              }
+            }
+        )
+        .addOrderByColumn("alias")
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    final GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(subquery)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("alias", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                new LongSumAggregatorFactory("rows", "rows"),
+                new LongSumAggregatorFactory("idx", "idx_subpostagg")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx", "+", Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_agg", "idx"),
+                        new ConstantPostAggregator("ten_thousand", 10000, 10000)
+                    )
+                )
+
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 11135.0),
+        createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 11118.0),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 11158.0),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 11120.0),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 11121.0),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 11078.0),
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 11119.0),
+
+        createExpectedRow("2011-04-02", "alias", "automotive", "rows", 1L, "idx", 11147.0),
+        createExpectedRow("2011-04-02", "alias", "business", "rows", 1L, "idx", 11112.0),
+        createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 11166.0),
+        createExpectedRow("2011-04-02", "alias", "health", "rows", 1L, "idx", 11113.0),
+        createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 13447.0),
+        createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 11114.0),
+        createExpectedRow("2011-04-02", "alias", "premium", "rows", 3L, "idx", 13505.0),
+        createExpectedRow("2011-04-02", "alias", "technology", "rows", 1L, "idx", 11097.0),
+        createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 11126.0)
+    );
+
+    // Subqueries are handled by the ToolChest
+    Iterable<Row> results = runQuery(query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
+  @Test
+  public void testSubqueryWithMultiColumnAggregators()
+  {
+    final GroupByQuery subquery = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setDimFilter(new JavaScriptDimFilter("provider", "function(dim){ return true; }"))
+        .setAggregatorSpecs(
+            Arrays.asList(
+                QueryRunnerTestHelper.rowsCount,
+                new DoubleSumAggregatorFactory("idx_subagg", "index"),
+                new JavaScriptAggregatorFactory(
+                    "js_agg",
+                    Arrays.asList("index", "provider"),
+                    "function(current, index, dim){return current + index + dim.length;}",
+                    "function(){return 0;}",
+                    "function(a,b){return a + b;}"
+                )
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx_subpostagg",
+                    "+",
+                    Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
+                        new ConstantPostAggregator("thousand", 1000, 1000)
+                    )
+                )
+
+            )
+        )
+        .setHavingSpec(
+            new HavingSpec()
+            {
+              @Override
+              public boolean eval(Row row)
+              {
+                return (row.getFloatMetric("idx_subpostagg") < 3800);
+              }
+            }
+        )
+        .addOrderByColumn("alias")
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    final GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(subquery)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("alias", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                new LongSumAggregatorFactory("rows", "rows"),
+                new LongSumAggregatorFactory("idx", "idx_subpostagg"),
+                new DoubleSumAggregatorFactory("js_outer_agg", "js_agg")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx", "+", Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_agg", "idx"),
+                        new ConstantPostAggregator("ten_thousand", 10000, 10000)
+                    )
+                )
+
+            )
+        )
+        .setLimitSpec(
+            new DefaultLimitSpec(
+                Arrays.asList(
+                    new OrderByColumnSpec(
+                        "alias",
+                        OrderByColumnSpec.Direction.DESCENDING
+                    )
+                ),
+                5
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 11119.0, "js_outer_agg", 123.92274475097656),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 11078.0, "js_outer_agg", 82.62254333496094),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 11121.0, "js_outer_agg", 125.58358001708984),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 11120.0, "js_outer_agg", 124.13470458984375),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 11158.0, "js_outer_agg", 162.74722290039062)
+    );
+
+    // Subqueries are handled by the ToolChest
+    Iterable<Row> results = runQuery(query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
   private Iterable<Row> runQuery(GroupByQuery query)
   {
 
@@ -19,12 +19,16 @@
 
 package io.druid.query.timeboundary;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.MapMaker;
 import com.metamx.common.guava.Sequences;
 import io.druid.query.Druids;
 import io.druid.query.QueryRunner;
 import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.Result;
+import io.druid.query.RetryQueryRunner;
+import io.druid.query.TableDataSource;
 import org.joda.time.DateTime;
 import org.junit.Assert;
 import org.junit.Test;
@@ -32,8 +36,11 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  */
@@ -76,4 +83,89 @@ public class TimeBoundaryQueryRunnerTest
     Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime);
     Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime);
   }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTimeBoundaryMax()
+  {
+    TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
+                                                .dataSource("testing")
+                                                .bound(TimeBoundaryQuery.MAX_TIME)
+                                                .build();
+    Map<String, Object> context = new MapMaker().makeMap();
+    context.put(RetryQueryRunner.missingSegments, Lists.newArrayList());
+    Iterable<Result<TimeBoundaryResultValue>> results = Sequences.toList(
+        runner.run(timeBoundaryQuery, context),
+        Lists.<Result<TimeBoundaryResultValue>>newArrayList()
+    );
+    TimeBoundaryResultValue val = results.iterator().next().getValue();
+    DateTime minTime = val.getMinTime();
+    DateTime maxTime = val.getMaxTime();
+
+    Assert.assertNull(minTime);
+    Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime);
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTimeBoundaryMin()
+  {
+    TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
+                                                .dataSource("testing")
+                                                .bound(TimeBoundaryQuery.MIN_TIME)
+                                                .build();
+    Map<String, Object> context = new MapMaker().makeMap();
+    context.put(RetryQueryRunner.missingSegments, Lists.newArrayList());
+    Iterable<Result<TimeBoundaryResultValue>> results = Sequences.toList(
+        runner.run(timeBoundaryQuery, context),
+        Lists.<Result<TimeBoundaryResultValue>>newArrayList()
+    );
+    TimeBoundaryResultValue val = results.iterator().next().getValue();
+    DateTime minTime = val.getMinTime();
+    DateTime maxTime = val.getMaxTime();
+
+    Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime);
+    Assert.assertNull(maxTime);
+  }
+
+  @Test
+  public void testMergeResults() throws Exception
+  {
+    List<Result<TimeBoundaryResultValue>> results = Arrays.asList(
+        new Result<>(
+            new DateTime(),
+            new TimeBoundaryResultValue(
+                ImmutableMap.of(
+                    "maxTime", "2012-01-01",
+                    "minTime", "2011-01-01"
+                )
+            )
+        ),
+        new Result<>(
+            new DateTime(),
+            new TimeBoundaryResultValue(
+                ImmutableMap.of(
+                    "maxTime", "2012-02-01",
+                    "minTime", "2011-01-01"
+                )
+            )
+        )
+    );
+
+    TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null);
+    Iterable<Result<TimeBoundaryResultValue>> actual = query.mergeResults(results);
+
+    Assert.assertTrue(actual.iterator().next().getValue().getMaxTime().equals(new DateTime("2012-02-01")));
+  }
+
+  @Test
+  public void testMergeResultsEmptyResults() throws Exception
+  {
+    List<Result<TimeBoundaryResultValue>> results = Lists.newArrayList();
+
+    TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null);
+    Iterable<Result<TimeBoundaryResultValue>> actual = query.mergeResults(results);
+
+    Assert.assertFalse(actual.iterator().hasNext());
+  }
 }
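testMergeResults above only checks that the merged result keeps the later maxTime across partial results. A dependency-free sketch of that min/max folding over (minTime, maxTime) pairs (the epoch values and the dates in the comments are illustrative, chosen to echo the 2011/2012 strings in the test):

```java
import java.util.Arrays;
import java.util.List;

public class TimeBoundaryMergeExample
{
  public static void main(String[] args)
  {
    // Each long[] is {minTime, maxTime} in epoch millis (UTC); values are illustrative.
    List<long[]> partialResults = Arrays.asList(
        new long[]{1293840000000L, 1325376000000L},   // 2011-01-01 .. 2012-01-01
        new long[]{1293840000000L, 1328054400000L}    // 2011-01-01 .. 2012-02-01
    );

    long mergedMin = Long.MAX_VALUE;
    long mergedMax = Long.MIN_VALUE;
    for (long[] result : partialResults) {
      mergedMin = Math.min(mergedMin, result[0]);   // overall earliest minTime
      mergedMax = Math.max(mergedMax, result[1]);   // overall latest maxTime
    }

    // The test's assertion corresponds to mergedMax being the 2012-02-01 value.
    System.out.println("minTime=" + mergedMin + ", maxTime=" + mergedMax);
  }
}
```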
@@ -180,6 +180,7 @@ public class TestIndex
             new TimestampSpec("ts", "iso"),
             new DimensionsSpec(Arrays.asList(DIMENSIONS), null, null),
             "\t",
+            "\u0001",
             Arrays.asList(COLUMNS)
         ),
         null, null, null, null
@@ -19,8 +19,8 @@
 
 package io.druid.segment.data;
 
-import com.google.common.io.Closeables;
 import com.google.common.primitives.Longs;
+import com.metamx.common.guava.CloseQuietly;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -47,7 +47,7 @@ public class CompressedLongsIndexedSupplierTest
   @Before
   public void setUp() throws Exception
   {
-    Closeables.closeQuietly(indexed);
+    CloseQuietly.close(indexed);
     indexed = null;
     supplier = null;
     vals = null;
@@ -56,7 +56,7 @@ public class CompressedLongsIndexedSupplierTest
   @After
   public void tearDown() throws Exception
   {
-    Closeables.closeQuietly(indexed);
+    CloseQuietly.close(indexed);
   }
 
   private void setupSimple()
@@ -247,7 +247,7 @@ public class CompressedLongsIndexedSupplierTest
       stopLatch.await();
     }
     finally {
-      Closeables.closeQuietly(indexed2);
+      CloseQuietly.close(indexed2);
     }
 
     if (failureHappened.get()) {
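This hunk, like several others in the commit, swaps Guava's Closeables.closeQuietly for CloseQuietly.close from com.metamx.common.guava; later Guava releases deprecate the general closeQuietly(Closeable) overload, which is presumably what motivates routing quiet closes through a project-owned helper. A minimal sketch of such a helper using only java.io (the QuietCloser name and the System.err logging are illustrative, not the metamx implementation):

```java
import java.io.Closeable;
import java.io.IOException;

public final class QuietCloser
{
  private QuietCloser() {}

  // Close a resource without letting an IOException escape the caller's cleanup path.
  public static void close(Closeable resource)
  {
    if (resource == null) {
      return;
    }
    try {
      resource.close();
    }
    catch (IOException e) {
      // A real helper would log this; printing keeps the sketch dependency-free.
      System.err.println("Failed to close " + resource + ": " + e.getMessage());
    }
  }
}
```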
@@ -9,7 +9,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.122-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -128,7 +128,7 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
   @Override
   public Firehose connect(StringInputRowParser firehoseParser) throws IOException
   {
-    final StringInputRowParser stringParser = (StringInputRowParser) firehoseParser;
+    final StringInputRowParser stringParser = firehoseParser;
 
     ConnectionOptions lyraOptions = new ConnectionOptions(this.connectionFactory);
     Config lyraConfig = new Config()
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.122-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -21,7 +21,6 @@ package io.druid.storage.s3;
 
 import com.google.common.base.Throwables;
 import com.google.common.io.ByteStreams;
-import com.google.common.io.Closeables;
 import com.google.common.io.Files;
 import com.google.inject.Inject;
 import com.metamx.common.ISE;
@@ -95,9 +94,7 @@ public class S3DataSegmentPuller implements DataSegmentPuller
     try {
       s3Obj = s3Client.getObject(s3Coords.bucket, s3Coords.path);
 
-      InputStream in = null;
-      try {
-        in = s3Obj.getDataInputStream();
+      try (InputStream in = s3Obj.getDataInputStream()) {
         final String key = s3Obj.getKey();
         if (key.endsWith(".zip")) {
           CompressionUtils.unzip(in, outDir);
@@ -113,9 +110,6 @@ public class S3DataSegmentPuller implements DataSegmentPuller
         catch (IOException e) {
           throw new IOException(String.format("Problem decompressing object[%s]", s3Obj), e);
         }
-        finally {
-          Closeables.closeQuietly(in);
-        }
       }
       finally {
         S3Utils.closeStreamsQuietly(s3Obj);
@@ -127,7 +121,8 @@ public class S3DataSegmentPuller implements DataSegmentPuller
     catch (Exception e) {
       try {
         FileUtils.deleteDirectory(outDir);
-      } catch (IOException ioe) {
+      }
+      catch (IOException ioe) {
         log.warn(
             ioe,
             "Failed to remove output directory for segment[%s] after exception: %s",
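The S3 puller hunks above replace the manual InputStream / finally / closeQuietly bookkeeping with a try-with-resources block, so the stream is closed on every exit path without an explicit finally. A standalone sketch of the same conversion (the segment.zip path and the byte-counting body are illustrative):

```java
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class TryWithResourcesExample
{
  // The stream declared in the try header is closed automatically when the block
  // exits, normally or exceptionally, which is what the removed
  // finally { Closeables.closeQuietly(in); } used to do by hand.
  public static long countBytes(String path) throws IOException
  {
    try (InputStream in = new FileInputStream(path)) {
      long count = 0;
      while (in.read() != -1) {
        count++;
      }
      return count;
    }
  }

  public static void main(String[] args) throws IOException
  {
    System.out.println(countBytes("segment.zip"));   // illustrative path
  }
}
```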
@@ -18,8 +18,7 @@
   ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>io.druid</groupId>
     <artifactId>druid-server</artifactId>
@@ -29,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.122-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -29,7 +29,6 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory;
 import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Maps;
-import com.google.common.io.Closeables;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -37,6 +36,7 @@ import com.metamx.common.IAE;
 import com.metamx.common.Pair;
 import com.metamx.common.RE;
 import com.metamx.common.guava.BaseSequence;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
 import com.metamx.common.logger.Logger;
@@ -266,7 +266,7 @@ public class DirectDruidClient<T> implements QueryRunner<T>
           @Override
           public void cleanup(JsonParserIterator<T> iterFromMake)
           {
-            Closeables.closeQuietly(iterFromMake);
+            CloseQuietly.close(iterFromMake);
           }
         }
     );
@@ -309,7 +309,7 @@ public class DirectDruidClient<T> implements QueryRunner<T>
         return false;
       }
       if (jp.getCurrentToken() == JsonToken.END_ARRAY) {
-        Closeables.closeQuietly(jp);
+        CloseQuietly.close(jp);
         return false;
       }
 
@@ -23,10 +23,10 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.MapMaker;
 import com.google.common.collect.Sets;
-import com.google.common.io.Closeables;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
 import com.metamx.common.Pair;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.lifecycle.LifecycleStart;
 import com.metamx.common.lifecycle.LifecycleStop;
 import com.metamx.common.logger.Logger;
@@ -112,7 +112,7 @@ public class Announcer
       started = false;
 
       for (Map.Entry<String, PathChildrenCache> entry : listeners.entrySet()) {
-        Closeables.closeQuietly(entry.getValue());
+        CloseQuietly.close(entry.getValue());
       }
 
       for (Map.Entry<String, ConcurrentMap<String, byte[]>> entry : announcements.entrySet()) {
@@ -353,7 +353,7 @@ public class Announcer
         cache.start();
       }
       catch (Exception e) {
-        Closeables.closeQuietly(cache);
+        CloseQuietly.close(cache);
         throw Throwables.propagate(e);
       }
     }
@@ -406,6 +406,12 @@ public class DiscoveryModule implements Module
         return null;
       }
 
+      @Override
+      public Collection<ServiceInstance<T>> getAllInstances() throws Exception
+      {
+        return null;
+      }
+
       @Override
       public void noteError(ServiceInstance<T> tServiceInstance) {
 
@@ -25,6 +25,7 @@ import org.apache.curator.x.discovery.ServiceInstance;
 import org.apache.curator.x.discovery.ServiceProvider;
 
 import java.io.IOException;
+import java.util.Collection;
 
 /**
  */
@@ -62,6 +63,12 @@ public class ServerDiscoveryFactory
         return null;
       }
 
+      @Override
+      public Collection<ServiceInstance<T>> getAllInstances() throws Exception
+      {
+        return null;
+      }
+
       @Override
       public void noteError(ServiceInstance<T> tServiceInstance) {
         // do nothing
@@ -120,6 +120,14 @@ public class Initialization
     return retVal;
   }
 
+  /**
+   * Used for testing only
+   */
+  protected static void clearLoadedModules()
+  {
+    extensionsMap.clear();
+  }
+
   public synchronized static <T> Collection<T> getFromExtensions(ExtensionsConfig config, Class<T> clazz)
   {
     final TeslaAether aether = getAetherClient(config);
@@ -23,11 +23,11 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
-import com.google.common.io.Closeables;
 import com.google.inject.Inject;
-import com.metamx.common.exception.FormattedException;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.lifecycle.LifecycleStart;
 import com.metamx.common.lifecycle.LifecycleStop;
+import com.metamx.common.parsers.ParseException;
 import com.metamx.emitter.EmittingLogger;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.InputRow;
@@ -95,7 +95,7 @@ public class RealtimeManager implements QuerySegmentWalker
   public void stop()
   {
     for (FireChief chief : chiefs.values()) {
-      Closeables.closeQuietly(chief);
+      CloseQuietly.close(chief);
     }
   }
 
@@ -185,7 +185,7 @@ public class RealtimeManager implements QuerySegmentWalker
 
       long nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
       while (firehose.hasMore()) {
-        final InputRow inputRow;
+        InputRow inputRow = null;
         try {
           try {
             inputRow = firehose.nextRow();
@@ -214,10 +214,11 @@ public class RealtimeManager implements QuerySegmentWalker
           }
           metrics.incrementProcessed();
         }
-        catch (FormattedException e) {
-          log.info(e, "unparseable line: %s", e.getDetails());
+        catch (ParseException e) {
+          if (inputRow != null) {
+            log.error(e, "unparseable line: %s", inputRow);
+          }
           metrics.incrementUnparseable();
-          continue;
         }
       }
     }
@@ -237,7 +238,7 @@ public class RealtimeManager implements QuerySegmentWalker
         throw e;
       }
       finally {
-        Closeables.closeQuietly(firehose);
+        CloseQuietly.close(firehose);
         if (normalExit) {
           plumber.finishJob();
           plumber = null;
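The RealtimeManager hunks switch the ingest loop from FormattedException to ParseException and keep consuming after an unparseable row, counting it instead of aborting. A simplified, dependency-free sketch of that skip-and-count loop (the comma-separated row format, the sample input, and IllegalArgumentException standing in for ParseException are all illustrative):

```java
import java.util.Arrays;
import java.util.List;

public class IngestLoopExample
{
  public static void main(String[] args)
  {
    List<String> lines = Arrays.asList("1,ok", "garbage", "3,ok");   // illustrative input

    int processed = 0;
    int unparseable = 0;
    for (String line : lines) {
      try {
        parseRow(line);
        processed++;        // metrics.incrementProcessed() in the real loop
      }
      catch (IllegalArgumentException e) {
        // Mirrors the patched catch block: note the bad row, then keep reading.
        unparseable++;      // metrics.incrementUnparseable() in the real loop
      }
    }
    System.out.println("processed=" + processed + ", unparseable=" + unparseable);
  }

  private static int parseRow(String line)
  {
    String[] parts = line.split(",");
    if (parts.length != 2) {
      throw new IllegalArgumentException("unparseable line: " + line);
    }
    return Integer.parseInt(parts[0]);
  }
}
```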
@@ -31,18 +31,13 @@ import com.ircclouds.irc.api.domain.messages.ChannelPrivMsg;
 import com.ircclouds.irc.api.listeners.VariousMessageListenerAdapter;
 import com.ircclouds.irc.api.state.IIRCState;
 import com.metamx.common.Pair;
-import com.metamx.common.exception.FormattedException;
 import com.metamx.common.logger.Logger;
-import io.druid.data.input.ByteBufferInputRowParser;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
 import io.druid.data.input.InputRow;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.ParseSpec;
 import org.joda.time.DateTime;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.LinkedBlockingQueue;
Some files were not shown because too many files have changed in this diff.