mirror of https://github.com/apache/druid.git
Fix some problems reported by PVS-Studio (#7738)
* Fix some problems reported by PVS-Studio
* Address comments
This commit is contained in:
parent b051d6688d
commit 782863ed0f
@@ -9,6 +9,7 @@ target
 .classpath
 .idea
 .project
+.PVS-Studio
 .settings/
 *.log
 *.DS_Store

@@ -233,8 +233,7 @@ public class SelectBenchmark
     factory = new SelectQueryRunnerFactory(
         new SelectQueryQueryToolChest(
             JSON_MAPPER,
-            QueryBenchmarkUtil.noopIntervalChunkingQueryRunnerDecorator(),
-            selectConfigSupplier
+            QueryBenchmarkUtil.noopIntervalChunkingQueryRunnerDecorator()
         ),
         new SelectQueryEngine(),
         QueryBenchmarkUtil.NOOP_QUERYWATCHER

@@ -158,7 +158,9 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, LifeCycle
   private synchronized boolean compareAndSet(State expected, State transition)
   {
     if (current == expected) {
-      return transition(transition);
+      current = transition;
+      notifyAll();
+      return true;
     }
     return false;
   }

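The transition helper (removed in the next hunk) unconditionally returned true, which is what the analyzer flags; inlining it also makes the monitor handoff visible at the call site. A minimal, self-contained sketch of the resulting pattern, with a hypothetical waitFor companion showing who consumes the notifyAll (Log4jShutdown's real State enum and callers differ):

    public class GuardedStateMachineDemo
    {
      enum State { RUNNING, STOPPING, STOPPED }

      private State current = State.RUNNING;

      private synchronized boolean compareAndSet(State expected, State transition)
      {
        if (current == expected) {
          current = transition;
          notifyAll(); // wake threads blocked in waitFor below
          return true;
        }
        return false;
      }

      // Hypothetical companion: the other half of the notifyAll handoff.
      private synchronized void waitFor(State target) throws InterruptedException
      {
        while (current != target) {
          wait();
        }
      }

      public static void main(String[] args) throws InterruptedException
      {
        GuardedStateMachineDemo m = new GuardedStateMachineDemo();
        Thread waiter = new Thread(() -> {
          try {
            m.waitFor(State.STOPPED);
            System.out.println("observed STOPPED");
          }
          catch (InterruptedException ignored) {
          }
        });
        waiter.start();
        m.compareAndSet(State.RUNNING, State.STOPPED);
        waiter.join();
      }
    }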
@@ -189,13 +191,6 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, LifeCycle
     return current;
   }

-  private synchronized boolean transition(State transition)
-  {
-    current = transition;
-    notifyAll();
-    return true;
-  }
-
   private synchronized State get()
   {
     return current;

@@ -20,6 +20,7 @@
 package org.apache.druid.segment.loading;

 import org.apache.druid.guice.annotations.ExtensionPoint;
+import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.timeline.DataSegment;

@@ -30,6 +31,15 @@ public interface DataSegmentKiller
 {
   Logger log = new Logger(DataSegmentKiller.class);

+  static String descriptorPath(String path)
+  {
+    int lastPathSeparatorIndex = path.lastIndexOf('/');
+    if (lastPathSeparatorIndex == -1) {
+      throw new IAE("Invalid path: [%s], should contain '/'", path);
+    }
+    return path.substring(0, lastPathSeparatorIndex) + "/descriptor.json";
+  }
+
   /**
    * Removes segment files (index and metadata) from deep storage.
    * @param segment the segment to kill

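A usage sketch of the new static method (hypothetical path value; the class is the interface introduced above). The S3 and Google killer hunks further down replace their private copies of this substring logic with calls to it:

    import org.apache.druid.segment.loading.DataSegmentKiller;

    public class DescriptorPathDemo
    {
      public static void main(String[] args)
      {
        // index.zip and descriptor.json sit side by side in deep storage
        String indexPath = "test/2015-04-12/1/0/index.zip";
        System.out.println(DataSegmentKiller.descriptorPath(indexPath));
        // prints: test/2015-04-12/1/0/descriptor.json
      }
    }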
@@ -53,7 +53,7 @@ public class AzureTaskLogs implements TaskLogs
   {
     final String taskKey = getTaskLogKey(taskid);
     log.info("Pushing task log %s to: %s", logFile, taskKey);
-    pushTaskFile(taskid, logFile, taskKey);
+    pushTaskFile(logFile, taskKey);
   }

   @Override

@@ -61,10 +61,10 @@ public class AzureTaskLogs implements TaskLogs
   {
     final String taskKey = getTaskReportsKey(taskid);
     log.info("Pushing task reports %s to: %s", reportFile, taskKey);
-    pushTaskFile(taskid, reportFile, taskKey);
+    pushTaskFile(reportFile, taskKey);
   }

-  private void pushTaskFile(final String taskId, final File logFile, String taskKey)
+  private void pushTaskFile(final File logFile, String taskKey)
   {
     try {
       AzureUtils.retryAzureOperation(

@@ -60,8 +60,6 @@ import java.util.concurrent.TimeUnit;
 public class KafkaSimpleConsumer
 {

-  public static final List<BytesMessageWithOffset> EMPTY_MSGS = new ArrayList<>();
-
   private static final Logger log = new Logger(KafkaSimpleConsumer.class);

   private final List<HostAndPort> allBrokers;

@@ -274,7 +272,7 @@ public class KafkaSimpleConsumer
       }
     }

-    return response != null ? filterAndDecode(response.messageSet(topic, partitionId), offset) : EMPTY_MSGS;
+    return filterAndDecode(response.messageSet(topic, partitionId), offset);
   }

   private void stopConsumer()

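The null arm of the removed ternary was presumably flagged as dead: on every path that reaches this return, response has already been assigned (failed attempts retry or throw), which is also why the now-unused EMPTY_MSGS constant could go. A self-contained sketch of that shape, with hypothetical names:

    public class ProvablyNonNullDemo
    {
      public static void main(String[] args)
      {
        String response = null;
        for (int attempt = 0; attempt < 3; attempt++) {
          if (System.nanoTime() % 2 == 0) {
            continue; // transient failure: retry
          }
          response = "fetched";
          break;
        }
        if (response == null) {
          throw new IllegalStateException("no broker responded");
        }
        // response is provably non-null here, so a guard like
        // `response != null ? decode(response) : EMPTY_MSGS`
        // reduces to a plain `decode(response)`
        System.out.println(response.toUpperCase());
      }
    }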
@@ -70,8 +70,7 @@ public class MapVirtualColumnSelectTest
     SelectQueryRunnerFactory factory = new SelectQueryRunnerFactory(
         new SelectQueryQueryToolChest(
             new DefaultObjectMapper(),
-            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-            selectConfigSupplier
+            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
         ),
         new SelectQueryEngine(),
         QueryRunnerTestHelper.NOOP_QUERYWATCHER

@@ -91,7 +91,8 @@ public class CoordinatorBasicAuthenticatorMetadataStorageUpdater implements Basi
       BasicAuthCommonCacheConfig commonCacheConfig,
       @Smile ObjectMapper objectMapper,
       BasicAuthenticatorCacheNotifier cacheNotifier,
-      ConfigManager configManager // ConfigManager creates the db table we need, set a dependency here
+      ConfigManager configManager // -V6022 (unused parameter): ConfigManager creates the db table we need,
+                                  // set a dependency here
   )
   {
     this.exec = Execs.scheduledSingleThreaded("CoordinatorBasicAuthenticatorMetadataStorageUpdater-Exec--%d");

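The suppressed V6022 documents a real idiom: the parameter is never read, but declaring it forces Guice to construct (and thereby initialize) ConfigManager before this class runs. A minimal sketch with hypothetical stand-in classes:

    import com.google.inject.Guice;
    import com.google.inject.Inject;
    import com.google.inject.Singleton;

    public class DependencyEdgeDemo
    {
      @Singleton
      static class ConfigManager
      {
        ConfigManager()
        {
          System.out.println("creating config table...");
        }
      }

      static class Updater
      {
        @Inject
        Updater(ConfigManager configManager) // unused on purpose; ordering only
        {
          System.out.println("updater can rely on the table existing");
        }
      }

      public static void main(String[] args)
      {
        Guice.createInjector().getInstance(Updater.class);
      }
    }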
@@ -107,7 +107,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdater implements BasicAu
       BasicAuthCommonCacheConfig commonCacheConfig,
       @Smile ObjectMapper objectMapper,
       BasicAuthorizerCacheNotifier cacheNotifier,
-      ConfigManager configManager // ConfigManager creates the db table we need, set a dependency here
+      ConfigManager configManager // -V6022: ConfigManager creates the db table we need, set a dependency here
   )
   {
     this.exec = Execs.scheduledSingleThreaded("CoordinatorBasicAuthorizerMetadataStorageUpdater-Exec--%d");

@@ -52,7 +52,7 @@ public class GoogleDataSegmentKiller implements DataSegmentKiller
     Map<String, Object> loadSpec = segment.getLoadSpec();
     final String bucket = MapUtils.getString(loadSpec, "bucket");
     final String indexPath = MapUtils.getString(loadSpec, "path");
-    final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf('/')) + "/descriptor.json";
+    final String descriptorPath = DataSegmentKiller.descriptorPath(indexPath);

     try {
       deleteIfPresent(bucket, indexPath);

@@ -25,15 +25,6 @@ import java.io.IOException;

 public class GoogleUtils
 {
-  public static String toFilename(String path)
-  {
-    return path.substring(path.lastIndexOf('/') + 1); // characters after last '/'
-  }
-
-  public static String indexZipForSegmentPath(String path)
-  {
-    return path.substring(0, path.lastIndexOf('/')) + "/index.zip";
-  }
-
   public static boolean isRetryable(Throwable t)
   {

@@ -24,6 +24,7 @@ import com.google.api.client.googleapis.testing.json.GoogleJsonResponseException
 import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.common.collect.ImmutableMap;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.segment.loading.DataSegmentKiller;
 import org.apache.druid.segment.loading.SegmentLoadingException;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.NoneShardSpec;

@@ -40,7 +41,7 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport
 {
   private static final String bucket = "bucket";
   private static final String indexPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
-  private static final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf('/')) + "/descriptor.json";
+  private static final String descriptorPath = DataSegmentKiller.descriptorPath(indexPath);

   private static final DataSegment dataSegment = new DataSegment(
       "test",

@@ -419,7 +419,7 @@ public class ApproximateHistogram
     // use unused slot to shift array left or right and make space for the new bin to insert
     if (insertAt < unusedIndex) {
       shiftRight(insertAt, unusedIndex);
-    } else if (insertAt >= unusedIndex) {
+    } else {
       shiftLeft(unusedIndex, insertAt - 1);
       insertAt--;
     }

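The two comparisons are exact complements, so the second test could never be false once the first failed; that always-true condition is the flagged pattern. A tiny illustration:

    public class ComplementaryBranchesDemo
    {
      public static void main(String[] args)
      {
        int insertAt = 7;
        int unusedIndex = 3;
        if (insertAt < unusedIndex) {
          System.out.println("shift right");
        } else { // was `else if (insertAt >= unusedIndex)`, which is always true here
          System.out.println("shift left");
        }
      }
    }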
@@ -166,7 +166,6 @@ public class OrcStructConverter
         fieldIndexCache.put(fields.get(i), i);
       }
     }
-    WritableComparable wc = struct.getFieldValue(fieldName);

     int fieldIndex = fieldIndexCache.getOrDefault(fieldName, -1);

@@ -66,7 +66,7 @@ public class ParquetAvroHadoopInputRowParser implements InputRowParser<GenericRe
     this.binaryAsString = binaryAsString == null ? false : binaryAsString;

     final JSONPathSpec flattenSpec;
-    if (parseSpec != null && (parseSpec instanceof AvroParseSpec)) {
+    if (parseSpec instanceof AvroParseSpec) {
       flattenSpec = ((AvroParseSpec) parseSpec).getFlattenSpec();
     } else {
       flattenSpec = JSONPathSpec.DEFAULT;

@@ -397,7 +397,7 @@ class ParquetGroupConverter
           return bytes;
         }
         default:
-          throw new RE("Unknown primitive conversion: %s", ot.name());
+          throw new RE("Unknown primitive conversion: %s", pt.getPrimitiveTypeName());
       }
     }
   }

@@ -710,8 +710,7 @@ public final class ProtoTestEventWrapper
       }
       org.apache.druid.data.input.protobuf.ProtoTestEventWrapper.ProtoTestEvent.Foo other = (org.apache.druid.data.input.protobuf.ProtoTestEventWrapper.ProtoTestEvent.Foo) obj;

-      boolean result = true;
-      result = result && (hasBar() == other.hasBar());
+      boolean result = hasBar() == other.hasBar();
       if (hasBar()) {
         result = result && getBar()
             .equals(other.getBar());

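Seeding an accumulator with a constant true and immediately AND-ing it away is the shape the analyzer flags in protobuf-generated equals() methods; the seed folds into the first term:

    public class ConstantSeedDemo
    {
      public static void main(String[] args)
      {
        boolean hasBar = true;
        boolean otherHasBar = false;
        // was: boolean result = true; result = result && (hasBar == otherHasBar);
        boolean result = hasBar == otherHasBar;
        System.out.println(result); // false
      }
    }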
@@ -50,7 +50,7 @@ public class S3DataSegmentKiller implements DataSegmentKiller
     Map<String, Object> loadSpec = segment.getLoadSpec();
     String s3Bucket = MapUtils.getString(loadSpec, "bucket");
     String s3Path = MapUtils.getString(loadSpec, "key");
-    String s3DescriptorPath = descriptorPathForSegmentPath(s3Path);
+    String s3DescriptorPath = DataSegmentKiller.descriptorPath(s3Path);

     if (s3Client.doesObjectExist(s3Bucket, s3Path)) {
       log.info("Removing index file[s3://%s/%s] from s3!", s3Bucket, s3Path);

@@ -68,11 +68,6 @@ public class S3DataSegmentKiller implements DataSegmentKiller
     }
   }

-  private static String descriptorPathForSegmentPath(String s3Path)
-  {
-    return s3Path.substring(0, s3Path.lastIndexOf('/')) + "/descriptor.json";
-  }
-
   @Override
   public void killAll()
   {

@@ -172,23 +172,6 @@ public class S3Utils
     ) + "/index.zip";
   }

-  static String indexZipForSegmentPath(String s3Path)
-  {
-    return s3Path.substring(0, s3Path.lastIndexOf('/')) + "/index.zip";
-  }
-
-  static String toFilename(String key)
-  {
-    return toFilename(key, "");
-  }
-
-  static String toFilename(String key, final String suffix)
-  {
-    String filename = key.substring(key.lastIndexOf('/') + 1); // characters after last '/'
-    filename = filename.substring(0, filename.length() - suffix.length()); // remove the suffix from the end
-    return filename;
-  }
-
   static AccessControlList grantFullControlToBucketOwner(ServerSideEncryptingAmazonS3 s3Client, String bucket)
   {
     final AccessControlList acl = s3Client.getBucketAcl(bucket);

@@ -154,7 +154,7 @@ public class HyperLogLogCollectorBenchmark extends SimpleBenchmark
     final ByteBuffer buf = allocateEmptyHLLBuffer(targetIsDirect, alignTarget, 0);

     for (int k = 0; k < reps; ++k) {
-      for (int i = 0; i < count; ++i) {
+      for (int i = 0; i < count; ++i) { //-V6017: The 'k' counter is not used in the nested loop because it's just reps.
         final int pos = positions[i];
         final int size = sizes[i];

@@ -724,8 +724,8 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer
       return;
     }
     final RemoteTaskRunnerWorkItem removed = completeTasks.remove(taskId);
-    final Worker worker = removed.getWorker();
-    if (removed == null || worker == null) {
+    final Worker worker;
+    if (removed == null || (worker = removed.getWorker()) == null) {
       log.makeAlert("WTF?! Asked to cleanup nonexistent task")
           .addData("taskId", taskId)
           .emit();

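The original dereferenced removed before the null check, so the guard could never fire (the dereference would already have thrown). Folding the getter into the condition keeps the check first. A self-contained sketch of the corrected ordering, with a Map standing in for completeTasks and trim() for getWorker():

    import java.util.HashMap;
    import java.util.Map;

    public class NullCheckOrderDemo
    {
      public static void main(String[] args)
      {
        Map<String, String> completeTasks = new HashMap<>();
        String removed = completeTasks.remove("task-1"); // null: no such task
        final String worker;
        // the dereference now happens only after removed is known non-null
        if (removed == null || (worker = removed.trim()) == null) {
          System.out.println("asked to cleanup nonexistent task");
        } else {
          System.out.println("cleaning up " + worker);
        }
      }
    }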
@@ -46,6 +46,7 @@ import org.apache.druid.indexing.overlord.config.HttpRemoteTaskRunnerConfig;
 import org.apache.druid.indexing.overlord.setup.DefaultWorkerBehaviorConfig;
 import org.apache.druid.indexing.worker.TaskAnnouncement;
 import org.apache.druid.indexing.worker.Worker;
+import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.apache.druid.java.util.http.client.HttpClient;

@@ -1285,8 +1286,18 @@ public class HttpRemoteTaskRunnerTest
   {
     return new WorkerHolder(smileMapper, httpClient, config, workersSyncExec, listener, worker)
     {
-      private final String workerHost = worker.getHost().substring(0, worker.getHost().indexOf(':'));
-      private final int workerPort = Integer.parseInt(worker.getHost().substring(worker.getHost().indexOf(':') + 1));
+      private final String workerHost;
+      private final int workerPort;
+
+      {
+        String hostAndPort = worker.getHost();
+        int colonIndex = hostAndPort.indexOf(':');
+        if (colonIndex == -1) {
+          throw new IAE("Invalid host and port: [%s]", colonIndex);
+        }
+        workerHost = hostAndPort.substring(0, colonIndex);
+        workerPort = Integer.parseInt(hostAndPort.substring(colonIndex + 1));
+      }

       @Override
       public void start()

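A field initializer is limited to a single expression, which is why the validation above moves into an instance initializer block; the block runs during construction and may throw before the anonymous instance escapes. A sketch with stand-in names:

    public class InitializerBlockDemo
    {
      public static void main(String[] args)
      {
        final String hostAndPortInput = "localhost:8091"; // stands in for worker.getHost()

        Object holder = new Object()
        {
          private final String workerHost;
          private final int workerPort;

          {
            int colonIndex = hostAndPortInput.indexOf(':');
            if (colonIndex == -1) {
              throw new IllegalArgumentException("Invalid host and port: [" + hostAndPortInput + "]");
            }
            workerHost = hostAndPortInput.substring(0, colonIndex);
            workerPort = Integer.parseInt(hostAndPortInput.substring(colonIndex + 1));
          }

          @Override
          public String toString()
          {
            return workerHost + ":" + workerPort;
          }
        };

        System.out.println(holder); // localhost:8091
      }
    }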
@@ -241,11 +241,6 @@ public class CoordinatorResourceTestClient
       );
     }

-    Map<String, Object> results = jsonMapper.readValue(
-        response.getContent(),
-        new TypeReference<Map<String, Object>>(){}
-    );
-
     StatusResponseHolder response2 = httpClient.go(
         new Request(HttpMethod.POST, new URL(url)).setContent(
             "application/json",

@@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Functions;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;

@@ -82,18 +81,16 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul

   public SelectQueryQueryToolChest(
       ObjectMapper jsonMapper,
-      IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator,
-      Supplier<SelectQueryConfig> configSupplier
+      IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator
   )
   {
-    this(jsonMapper, intervalChunkingQueryRunnerDecorator, configSupplier, DefaultSelectQueryMetricsFactory.instance());
+    this(jsonMapper, intervalChunkingQueryRunnerDecorator, DefaultSelectQueryMetricsFactory.instance());
   }

   @Inject
   public SelectQueryQueryToolChest(
       ObjectMapper jsonMapper,
       IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator,
-      Supplier<SelectQueryConfig> configSupplier,
       SelectQueryMetricsFactory queryMetricsFactory
   )
   {

@@ -194,15 +194,13 @@ public class AggregationTestHelper implements Closeable

     SelectQueryQueryToolChest toolchest = new SelectQueryQueryToolChest(
         TestHelper.makeJsonMapper(),
-        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-        configSupplier
+        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
     );

     SelectQueryRunnerFactory factory = new SelectQueryRunnerFactory(
         new SelectQueryQueryToolChest(
             TestHelper.makeJsonMapper(),
-            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-            configSupplier
+            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
         ),
         new SelectQueryEngine(
         ),

@@ -21,6 +21,7 @@ package org.apache.druid.query.search;

 import com.google.common.collect.ImmutableList;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.Result;
 import org.apache.druid.query.ordering.StringComparators;

@@ -305,8 +306,11 @@ public class SearchBinaryFnTest
   {
     List<SearchHit> result = new ArrayList<>();
     for (String hit : hits) {
-      int index = hit.indexOf(':');
-      result.add(new SearchHit(hit.substring(0, index), hit.substring(index + 1)));
+      int colonIndex = hit.indexOf(':');
+      if (colonIndex == -1) {
+        throw new IAE("Invalid hit: [%s]", hit);
+      }
+      result.add(new SearchHit(hit.substring(0, colonIndex), hit.substring(colonIndex + 1)));
     }
     Collections.sort(result, comparator);
     return result;

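String.indexOf returns -1 when the separator is missing, and the old code would then call substring(0, -1) and fail with a bare StringIndexOutOfBoundsException; the new guard fails fast and names the offending input. A compact illustration:

    public class IndexOfGuardDemo
    {
      public static void main(String[] args)
      {
        String hit = "dimension:value";
        int colonIndex = hit.indexOf(':');
        if (colonIndex == -1) {
          // without this guard, substring(0, -1) below would throw
          // StringIndexOutOfBoundsException with no hint about the bad input
          throw new IllegalArgumentException("Invalid hit: [" + hit + "]");
        }
        System.out.println(hit.substring(0, colonIndex));  // dimension
        System.out.println(hit.substring(colonIndex + 1)); // value
      }
    }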
@@ -75,8 +75,7 @@ public class MultiSegmentSelectQueryTest

   private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest(
       new DefaultObjectMapper(),
-      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-      configSupplier
+      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
   );

   private static final QueryRunnerFactory factory = new SelectQueryRunnerFactory(

@@ -38,8 +38,7 @@ public class SelectQueryQueryToolChestTest

   private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest(
       new DefaultObjectMapper(),
-      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-      configSupplier
+      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
   );

   @Test

@@ -129,8 +129,7 @@ public class SelectQueryRunnerTest

   private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest(
       new DefaultObjectMapper(),
-      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-      configSupplier
+      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
   );

   @Parameterized.Parameters(name = "{0}:descending={1}")

@@ -316,16 +316,12 @@ public class TestHelper
       final Object actualValue = actualMap.get(key);

       if (expectedValue instanceof Float || expectedValue instanceof Double) {
-        if (expectedValue == null) {
-          Assert.assertNull(actualValue);
-        } else {
-          Assert.assertEquals(
-              StringUtils.format("%s: key[%s]", msg, key),
-              ((Number) expectedValue).doubleValue(),
-              ((Number) actualValue).doubleValue(),
-              Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
-          );
-        }
+        Assert.assertEquals(
+            StringUtils.format("%s: key[%s]", msg, key),
+            ((Number) expectedValue).doubleValue(),
+            ((Number) actualValue).doubleValue(),
+            Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
+        );
       } else {
         Assert.assertEquals(
             StringUtils.format("%s: key[%s]", msg, key),

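instanceof is specified to evaluate to false for a null operand (JLS 15.20.2), so inside that branch expectedValue can never be null and the assertNull arm was unreachable:

    public class InstanceofNullDemo
    {
      public static void main(String[] args)
      {
        Object expectedValue = null;
        System.out.println(expectedValue instanceof Double); // false, always
      }
    }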
@@ -127,7 +127,7 @@ public class ServerSelector implements DiscoverySelector<QueryableDruidServer>
         .map(server -> server.getServer().getMetadata())
         .forEach(candidates::add);

-    if (candidates.size() < numCandidates) {
+    if (candidates.size() < numCandidates) { //-V6007: false alarm due to a bug in PVS-Studio
       strategy.pick(realtimeServers, segment.get(), numCandidates - candidates.size())
           .stream()
           .map(server -> server.getServer().getMetadata())

@@ -21,6 +21,7 @@ package org.apache.druid.server.http;

 import com.google.common.base.Preconditions;
 import com.google.common.net.HostAndPort;
+import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.StringUtils;

 public class HostAndPortWithScheme

@@ -42,9 +43,13 @@ public class HostAndPortWithScheme
   public static HostAndPortWithScheme fromString(String hostPortMaybeSchemeString)
   {
     if (hostPortMaybeSchemeString.startsWith("http")) {
+      int colonIndex = hostPortMaybeSchemeString.indexOf(':');
+      if (colonIndex == -1) {
+        throw new IAE("Invalid host with scheme string: [%s]", hostPortMaybeSchemeString);
+      }
       return HostAndPortWithScheme.fromString(
-          hostPortMaybeSchemeString.substring(0, hostPortMaybeSchemeString.indexOf(':')),
-          hostPortMaybeSchemeString.substring(hostPortMaybeSchemeString.indexOf(':') + 1)
+          hostPortMaybeSchemeString.substring(0, colonIndex),
+          hostPortMaybeSchemeString.substring(colonIndex + 1)
       );
     }
     return HostAndPortWithScheme.fromString("http", hostPortMaybeSchemeString);

@@ -1315,8 +1315,7 @@ public class CachingClusteredClientTest
         getDefaultQueryRunner(),
         new SelectQueryQueryToolChest(
             JSON_MAPPER,
-            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-            SELECT_CONFIG_SUPPLIER
+            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
         )
     );
     HashMap<String, Object> context = new HashMap<String, Object>();

@@ -1393,8 +1392,7 @@ public class CachingClusteredClientTest
         getDefaultQueryRunner(),
         new SelectQueryQueryToolChest(
             JSON_MAPPER,
-            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-            SELECT_CONFIG_SUPPLIER
+            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
         )
     );
     HashMap<String, Object> context = new HashMap<String, Object>();

@@ -92,8 +92,7 @@ public final class CachingClusteredClientTestUtils
         SelectQuery.class,
         new SelectQueryQueryToolChest(
             objectMapper,
-            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-            selectConfigSupplier
+            QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
         )
     )
     .put(

@@ -92,10 +92,10 @@ public class ByteCountingLRUMapTest
     final ByteBuffer k = ByteBuffer.allocate(1);

     assertMapValues(0, 0, 0);
-    map.put(k, new byte[1]);
-    map.put(k, new byte[2]);
-    map.put(k, new byte[5]);
-    map.put(k, new byte[3]);
+    map.put(k, new byte[1]); //-V6033: suppress "An item with the same key has already been added"
+    map.put(k, new byte[2]); //-V6033
+    map.put(k, new byte[5]); //-V6033
+    map.put(k, new byte[3]); //-V6033
     assertMapValues(1, 4, 0);
   }

@@ -160,10 +160,7 @@ public class CuratorDruidCoordinatorTest extends CuratorTestBase
         null,
         10,
         null,
-        false,
-        false,
-        new Duration("PT0s"),
-        Duration.millis(10)
+        new Duration("PT0s")
     );
     sourceLoadQueueChildrenCache = new PathChildrenCache(
         curator,

@@ -137,10 +137,7 @@ public class DruidCoordinatorTest extends CuratorTestBase
         null,
         10,
         null,
-        false,
-        false,
-        new Duration("PT0s"),
-        Duration.millis(10)
+        new Duration("PT0s")
     );
     pathChildrenCache = new PathChildrenCache(
         curator,

@@ -82,10 +82,7 @@ public class HttpLoadQueuePeonTest
         null,
         10,
         null,
-        false,
-        false,
-        Duration.ZERO,
-        Duration.millis(10)
+        Duration.ZERO
     )
     {
       @Override

@@ -97,10 +97,7 @@ public class LoadQueuePeonTest extends CuratorTestBase
         null,
         10,
         null,
-        false,
-        false,
-        Duration.millis(0),
-        Duration.millis(10)
+        Duration.millis(0)
     )
     );

@@ -295,10 +292,7 @@ public class LoadQueuePeonTest extends CuratorTestBase
         null,
         10,
         null,
-        false,
-        false,
-        new Duration("PT1s"),
-        Duration.millis(10)
+        new Duration("PT1s")
     )
     );

@@ -46,10 +46,7 @@ public class LoadQueuePeonTester extends CuratorLoadQueuePeon
         null,
         10,
         null,
-        false,
-        false,
-        new Duration("PT1s"),
-        Duration.millis(10)
+        new Duration("PT1s")
     )
     );
   }

@@ -44,10 +44,7 @@ public class TestDruidCoordinatorConfig extends DruidCoordinatorConfig
       Duration coordinatorKillDurationToRetain,
       int coordinatorKillMaxSegments,
       String consoleStatic,
-      boolean mergeSegments,
-      boolean convertSegments,
-      Duration getLoadQueuePeonRepeatDelay,
-      Duration CuratorCreateZkNodesRepeatDelay
+      Duration getLoadQueuePeonRepeatDelay
   )
   {
     this.coordinatorStartDelay = coordinatorStartDelay;

@@ -111,10 +111,7 @@ public class DruidCoordinatorSegmentKillerTest
         Duration.parse("PT86400S"),
         1000,
         null,
-        false,
-        false,
-        Duration.ZERO,
-        Duration.millis(10)
+        Duration.ZERO
     )
     );

@@ -119,8 +119,8 @@ public class SecurityResourceFilterTest extends ResourceFilterTestHelper
       Assert.fail();
     }
     catch (ForbiddenException e) {
+      EasyMock.verify(req, request, authorizerMapper);
       throw e;
     }
-    EasyMock.verify(req, request, authorizerMapper);
   }
 }

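With Assert.fail() ending the try block and the catch rethrowing, no control path survives the try/catch, so the verification placed after it could never run; it has to happen inside the catch. A minimal sketch of the shape (hypothetical names; the real test expects ForbiddenException):

    public class UnreachableAfterRethrowDemo
    {
      static void verifyMocks()
      {
        System.out.println("mocks verified");
      }

      public static void main(String[] args)
      {
        try {
          throw new IllegalStateException("expected"); // the call under test
          // Assert.fail() would follow here in the real test
        }
        catch (IllegalStateException e) {
          verifyMocks(); // the only reachable place to verify
          throw e;
        }
        // javac itself rejects any statement placed here as unreachable
      }
    }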
@@ -516,8 +516,7 @@ public class CalciteTests
         new SelectQueryRunnerFactory(
             new SelectQueryQueryToolChest(
                 TestHelper.makeJsonMapper(),
-                QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator(),
-                SELECT_CONFIG_SUPPLIER
+                QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
             ),
             new SelectQueryEngine(),
             QueryRunnerTestHelper.NOOP_QUERYWATCHER