mirror of https://github.com/apache/druid.git
Add back `UnnecessaryFullyQualifiedName` rule in pmd ruleset (#17570)
* Add back UnnecessaryFullyQualifiedName rule in pmd ruleset
* Fix checkstyle
parent 9bdb3d205c
commit cd6083fb94
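For context, PMD's UnnecessaryFullyQualifiedName rule reports references written with a full qualifier even though the simple name already resolves — most commonly a static member or method prefixed with its own enclosing class. The snippet below is an illustrative sketch only; the class and member names are made up and are not taken from this commit or from Druid. It shows the pattern the rule flags and the simplified form the hunks below convert the code to.

```java
// Hypothetical example, not Druid code: inside a class, qualifying its own
// static members with the class name is redundant, and PMD's
// UnnecessaryFullyQualifiedName rule reports it.
public class Example
{
  public static final String DEFAULT_NAME = "default";

  public static String nameOrDefault(String name)
  {
    // Flagged form: "Example." is unnecessary inside Example itself.
    //   return name == null ? Example.DEFAULT_NAME : name;
    // Preferred form, matching the cleanup applied throughout this commit:
    return name == null ? DEFAULT_NAME : name;
  }
}
```

The hunks that follow apply exactly this simplification across the codebase — dropping redundant enclosing-class qualifiers from static fields, static methods, and enum constants — and the ruleset change re-enables the rule in the project's PMD configuration.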
@@ -29,4 +29,5 @@ This ruleset defines the PMD rules for the Apache Druid project.
 <rule ref="category/java/codestyle.xml/UnnecessaryImport" />
 <rule ref="category/java/codestyle.xml/TooManyStaticImports" />
+<rule ref="category/java/codestyle.xml/UnnecessaryFullyQualifiedName"/>
 </ruleset>
@@ -227,7 +227,7 @@ public class OssUtils
 throws Exception
 {
 DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucket).withKeys(keysToDelete);
-OssUtils.retry(() -> {
+retry(() -> {
 client.deleteObjects(deleteRequest);
 return null;
 });
@@ -128,7 +128,7 @@ public class Utils
 boolean strictNumberParse
 )
 {
-CompressedBigDecimal compressedBigDecimal = Utils.objToCompressedBigDecimal(obj, strictNumberParse);
+CompressedBigDecimal compressedBigDecimal = objToCompressedBigDecimal(obj, strictNumberParse);

 if (compressedBigDecimal != null) {
 return scaleIfNeeded(compressedBigDecimal, scale);
@@ -34,7 +34,7 @@ public enum PeonPhase
 UNKNOWN("Unknown"),
 RUNNING("Running");

-private static final Map<String, PeonPhase> PHASE_MAP = Arrays.stream(PeonPhase.values())
+private static final Map<String, PeonPhase> PHASE_MAP = Arrays.stream(values())
 .collect(Collectors.toMap(
 PeonPhase::getPhase,
 Function.identity()
@@ -173,9 +173,7 @@ public class RabbitStreamSupervisorTuningConfig extends RabbitStreamIndexTaskTun
 public Duration getRepartitionTransitionDuration()
 {
 // just return a default for now.
-return SeekableStreamSupervisorTuningConfig.defaultDuration(
-null,
-SeekableStreamSupervisorTuningConfig.DEFAULT_REPARTITION_TRANSITION_DURATION);
+return SeekableStreamSupervisorTuningConfig.defaultDuration(null, DEFAULT_REPARTITION_TRANSITION_DURATION);
 }

 @Override
@@ -38,15 +38,15 @@ public class HllSketchHolder
 if (obj instanceof HllSketchHolder) {
 return (HllSketchHolder) obj;
 } else if (obj instanceof HllSketch) {
-return HllSketchHolder.of((HllSketch) obj);
+return of((HllSketch) obj);
 } else if (obj instanceof Union) {
-return HllSketchHolder.of((Union) obj);
+return of((Union) obj);
 } else if (obj instanceof byte[]) {
-return HllSketchHolder.of(HllSketch.heapify((byte[]) obj));
+return of(HllSketch.heapify((byte[]) obj));
 } else if (obj instanceof Memory) {
-return HllSketchHolder.of(HllSketch.wrap((Memory) obj));
+return of(HllSketch.wrap((Memory) obj));
 } else if (obj instanceof String) {
-return HllSketchHolder.of(HllSketch.heapify(StringUtils.decodeBase64(StringUtils.toUtf8((String) obj))));
+return of(HllSketch.heapify(StringUtils.decodeBase64(StringUtils.toUtf8((String) obj))));
 }

 throw new ISE("Object is not of a type[%s] that can be deserialized to sketch.", obj.getClass());
@@ -45,7 +45,7 @@ import java.util.Comparator;
 */
 public class SketchHolder
 {
-public static final SketchHolder EMPTY = SketchHolder.of(
+public static final SketchHolder EMPTY = of(
 Sketches.updateSketchBuilder()
 .build()
 .compact(true, null)

@@ -195,7 +195,7 @@ public class SketchHolder
 Union union = (Union) SetOperation.builder().setNominalEntries(nomEntries).build(Family.UNION);
 holder1.updateUnion(union);
 holder2.updateUnion(union);
-return SketchHolder.of(union);
+return of(union);
 }
 }

@@ -208,15 +208,15 @@ public class SketchHolder
 public static SketchHolder deserialize(Object serializedSketch)
 {
 if (serializedSketch instanceof String) {
-return SketchHolder.of(deserializeFromBase64EncodedString((String) serializedSketch));
+return of(deserializeFromBase64EncodedString((String) serializedSketch));
 } else if (serializedSketch instanceof byte[]) {
-return SketchHolder.of(deserializeFromByteArray((byte[]) serializedSketch));
+return of(deserializeFromByteArray((byte[]) serializedSketch));
 } else if (serializedSketch instanceof SketchHolder) {
 return (SketchHolder) serializedSketch;
 } else if (serializedSketch instanceof Sketch
 || serializedSketch instanceof Union
 || serializedSketch instanceof Memory) {
-return SketchHolder.of(serializedSketch);
+return of(serializedSketch);
 }

 throw new ISE(

@@ -228,9 +228,9 @@ public class SketchHolder
 public static SketchHolder deserializeSafe(Object serializedSketch)
 {
 if (serializedSketch instanceof String) {
-return SketchHolder.of(deserializeFromBase64EncodedStringSafe((String) serializedSketch));
+return of(deserializeFromBase64EncodedStringSafe((String) serializedSketch));
 } else if (serializedSketch instanceof byte[]) {
-return SketchHolder.of(deserializeFromByteArraySafe((byte[]) serializedSketch));
+return of(deserializeFromByteArraySafe((byte[]) serializedSketch));
 }

 return deserialize(serializedSketch);

@@ -285,13 +285,13 @@ public class SketchHolder
 for (Object o : holders) {
 ((SketchHolder) o).updateUnion(union);
 }
-return SketchHolder.of(union);
+return of(union);
 case INTERSECT:
 Intersection intersection = (Intersection) SetOperation.builder().setNominalEntries(sketchSize).build(Family.INTERSECTION);
 for (Object o : holders) {
 intersection.intersect(((SketchHolder) o).getSketch());
 }
-return SketchHolder.of(intersection.getResult(false, null));
+return of(intersection.getResult(false, null));
 case NOT:
 if (holders.length < 1) {
 throw new IllegalArgumentException("A-Not-B requires at least 1 sketch");

@@ -306,7 +306,7 @@ public class SketchHolder
 AnotB anotb = (AnotB) SetOperation.builder().setNominalEntries(sketchSize).build(Family.A_NOT_B);
 result = anotb.aNotB(result, ((SketchHolder) holders[i]).getSketch());
 }
-return SketchHolder.of(result);
+return of(result);
 default:
 throw new IllegalArgumentException("Unknown sketch operation " + func);
 }
@@ -172,7 +172,7 @@ public class BasicAuthUtils
 userMap = new HashMap<>();
 } else {
 try {
-userMap = objectMapper.readValue(userMapBytes, BasicAuthUtils.AUTHORIZER_USER_MAP_TYPE_REFERENCE);
+userMap = objectMapper.readValue(userMapBytes, AUTHORIZER_USER_MAP_TYPE_REFERENCE);
 }
 catch (IOException ioe) {
 throw new RuntimeException("Couldn't deserialize authorizer userMap!", ioe);

@@ -201,7 +201,7 @@ public class BasicAuthUtils
 groupMappingMap = new HashMap<>();
 } else {
 try {
-groupMappingMap = objectMapper.readValue(groupMappingMapBytes, BasicAuthUtils.AUTHORIZER_GROUP_MAPPING_MAP_TYPE_REFERENCE);
+groupMappingMap = objectMapper.readValue(groupMappingMapBytes, AUTHORIZER_GROUP_MAPPING_MAP_TYPE_REFERENCE);
 }
 catch (IOException ioe) {
 throw new RuntimeException("Couldn't deserialize authorizer groupMappingMap!", ioe);

@@ -230,7 +230,7 @@ public class BasicAuthUtils
 roleMap = new HashMap<>();
 } else {
 try {
-roleMap = objectMapper.readValue(roleMapBytes, BasicAuthUtils.AUTHORIZER_ROLE_MAP_TYPE_REFERENCE);
+roleMap = objectMapper.readValue(roleMapBytes, AUTHORIZER_ROLE_MAP_TYPE_REFERENCE);
 }
 catch (IOException ioe) {
 throw new RuntimeException("Couldn't deserialize authorizer roleMap!", ioe);
@@ -79,7 +79,7 @@ public class CatalogException extends Exception
 public static CatalogException badRequest(String msg, Object...args)
 {
 return new CatalogException(
-CatalogException.INVALID_ERROR,
+INVALID_ERROR,
 Response.Status.BAD_REQUEST,
 msg,
 args
@@ -65,9 +65,9 @@ public class HdfsStorageDruidModule implements DruidModule
 {
 return Collections.singletonList(
 new SimpleModule().registerSubtypes(
-new NamedType(HdfsLoadSpec.class, HdfsStorageDruidModule.SCHEME),
-new NamedType(HdfsInputSource.class, HdfsStorageDruidModule.SCHEME),
-new NamedType(HdfsInputSourceFactory.class, HdfsStorageDruidModule.SCHEME)
+new NamedType(HdfsLoadSpec.class, SCHEME),
+new NamedType(HdfsInputSource.class, SCHEME),
+new NamedType(HdfsInputSourceFactory.class, SCHEME)
 )
 );
 }
@@ -169,7 +169,7 @@ public class KafkaSupervisorTuningConfig extends KafkaIndexTaskTuningConfig
 // just return a default for now.
 return SeekableStreamSupervisorTuningConfig.defaultDuration(
 null,
-SeekableStreamSupervisorTuningConfig.DEFAULT_REPARTITION_TRANSITION_DURATION
+DEFAULT_REPARTITION_TRANSITION_DURATION
 );
 }
@@ -131,7 +131,7 @@ public class DefaultK8sApiClient implements K8sApiClient
 try {
 while (watch.hasNext()) {
 Watch.Response<V1Pod> item = watch.next();
-if (item != null && item.type != null && !item.type.equals(WatchResult.BOOKMARK)) {
+if (item != null && item.type != null && !BOOKMARK.equals(item.type)) {
 DiscoveryDruidNodeAndResourceVersion result = null;
 if (item.object != null) {
 result = new DiscoveryDruidNodeAndResourceVersion(

@@ -150,7 +150,7 @@ public class DefaultK8sApiClient implements K8sApiClient
 result
 );
 return true;
-} else if (item != null && item.type != null && item.type.equals(WatchResult.BOOKMARK)) {
+} else if (item != null && item.type != null && BOOKMARK.equals(item.type)) {
 // Events with type BOOKMARK will only contain resourceVersion and no metadata. See
 // Kubernetes API documentation for details.
 LOGGER.debug("BOOKMARK event fired, no nothing, only update resourceVersion");
@@ -208,8 +208,8 @@ public class K8sDruidNodeAnnouncer implements DruidNodeAnnouncer
 "%s=%s,%s=%s",
 getClusterIdentifierAnnouncementLabel(),
 discoveryConfig.getClusterIdentifier(),
-K8sDruidNodeAnnouncer.getRoleAnnouncementLabel(nodeRole),
-K8sDruidNodeAnnouncer.ANNOUNCEMENT_DONE
+getRoleAnnouncementLabel(nodeRole),
+ANNOUNCEMENT_DONE
 );
 }

@@ -219,9 +219,9 @@ public class K8sDruidNodeAnnouncer implements DruidNodeAnnouncer
 "%s=%s,%s=%s,%s=%s",
 getClusterIdentifierAnnouncementLabel(),
 discoveryConfig.getClusterIdentifier(),
-K8sDruidNodeAnnouncer.getRoleAnnouncementLabel(nodeRole),
-K8sDruidNodeAnnouncer.ANNOUNCEMENT_DONE,
-K8sDruidNodeAnnouncer.getIdHashAnnouncementLabel(),
+getRoleAnnouncementLabel(nodeRole),
+ANNOUNCEMENT_DONE,
+getIdHashAnnouncementLabel(),
 hashEncodeStringForLabelValue(node.getHostAndPortToUse())
 );
 }
@@ -287,7 +287,7 @@ public class SegmentGeneratorFrameProcessorFactory
 @Override
 public AppendableIndexSpec getAppendableIndexSpec()
 {
-return TuningConfig.DEFAULT_APPENDABLE_INDEX;
+return DEFAULT_APPENDABLE_INDEX;
 }

 @Override

@@ -346,7 +346,7 @@ public class SegmentGeneratorFrameProcessorFactory
 @Override
 public boolean isSkipBytesInMemoryOverheadCheck()
 {
-return TuningConfig.DEFAULT_SKIP_BYTES_IN_MEMORY_OVERHEAD_CHECK;
+return DEFAULT_SKIP_BYTES_IN_MEMORY_OVERHEAD_CHECK;
 }

 @Override
@@ -111,7 +111,7 @@ public class QueryKitUtils
 return clusterBy;
 } else {
 final List<KeyColumn> newColumns = new ArrayList<>(clusterBy.getColumns().size() + 1);
-newColumns.add(new KeyColumn(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN, KeyOrder.ASCENDING));
+newColumns.add(new KeyColumn(SEGMENT_GRANULARITY_COLUMN, KeyOrder.ASCENDING));
 newColumns.addAll(clusterBy.getColumns());
 return new ClusterBy(newColumns, 1);
 }

@@ -123,10 +123,10 @@ public class QueryKitUtils
 */
 public static void verifyRowSignature(final RowSignature signature)
 {
-if (signature.contains(QueryKitUtils.PARTITION_BOOST_COLUMN)) {
-throw new MSQException(new ColumnNameRestrictedFault(QueryKitUtils.PARTITION_BOOST_COLUMN));
-} else if (signature.contains(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN)) {
-throw new MSQException(new ColumnNameRestrictedFault(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN));
+if (signature.contains(PARTITION_BOOST_COLUMN)) {
+throw new MSQException(new ColumnNameRestrictedFault(PARTITION_BOOST_COLUMN));
+} else if (signature.contains(SEGMENT_GRANULARITY_COLUMN)) {
+throw new MSQException(new ColumnNameRestrictedFault(SEGMENT_GRANULARITY_COLUMN));
 }
 }

@@ -144,7 +144,7 @@ public class QueryKitUtils
 } else {
 return RowSignature.builder()
 .addAll(signature)
-.add(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN, ColumnType.LONG)
+.add(SEGMENT_GRANULARITY_COLUMN, ColumnType.LONG)
 .build();
 }
 }

@@ -194,8 +194,8 @@ public class QueryKitUtils
 public static VirtualColumn makeSegmentGranularityVirtualColumn(final ObjectMapper jsonMapper, final QueryContext queryContext)
 {
 final Granularity segmentGranularity =
-QueryKitUtils.getSegmentGranularityFromContext(jsonMapper, queryContext.asMap());
-final String timeColumnName = queryContext.getString(QueryKitUtils.CTX_TIME_COLUMN_NAME);
+getSegmentGranularityFromContext(jsonMapper, queryContext.asMap());
+final String timeColumnName = queryContext.getString(CTX_TIME_COLUMN_NAME);

 if (timeColumnName == null || Granularities.ALL.equals(segmentGranularity)) {
 return null;

@@ -213,7 +213,7 @@ public class QueryKitUtils
 }

 return new ExpressionVirtualColumn(
-QueryKitUtils.SEGMENT_GRANULARITY_COLUMN,
+SEGMENT_GRANULARITY_COLUMN,
 StringUtils.format(
 "timestamp_floor(%s, %s)",
 CalciteSqlDialect.DEFAULT.quoteIdentifier(timeColumnName),
@@ -50,7 +50,7 @@ public enum MSQMode
 @Nullable
 public static MSQMode fromString(String str)
 {
-for (MSQMode msqMode : MSQMode.values()) {
+for (MSQMode msqMode : values()) {
 if (msqMode.value.equalsIgnoreCase(str)) {
 return msqMode;
 }

@@ -66,12 +66,12 @@ public enum MSQMode
 public static void populateDefaultQueryContext(final String modeStr, final Map<String, Object> originalQueryContext)
 {
-MSQMode mode = MSQMode.fromString(modeStr);
+MSQMode mode = fromString(modeStr);
 if (mode == null) {
 throw new ISE(
 "%s is an unknown multi stage query mode. Acceptable modes: %s",
 modeStr,
-Arrays.stream(MSQMode.values()).map(m -> m.value).collect(Collectors.toList())
+Arrays.stream(values()).map(m -> m.value).collect(Collectors.toList())
 );
 }
 log.debug("Populating default query context with %s for the %s multi stage query mode", mode.defaultQueryContext, mode);
@@ -327,7 +327,7 @@ public class S3Utils
 log.debug("Deleting keys from bucket: [%s], keys: [%s]", bucket, keys);
 }
 DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucket).withKeys(keysToDelete);
-S3Utils.retryS3Operation(() -> {
+retryS3Operation(() -> {
 s3Client.deleteObjects(deleteRequest);
 return null;
 }, retries);

@@ -353,7 +353,7 @@ public class S3Utils
 final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, file);

 if (!disableAcl) {
-putObjectRequest.setAccessControlList(S3Utils.grantFullControlToBucketOwner(service, bucket));
+putObjectRequest.setAccessControlList(grantFullControlToBucketOwner(service, bucket));
 }
 log.info("Pushing [%s] to bucket[%s] and key[%s].", file, bucket, key);
 service.putObject(putObjectRequest);
@@ -958,7 +958,7 @@ public class DeterminePartitionsJob implements Jobby
 @Override
 public void checkOutputSpecs(JobContext job) throws IOException
 {
-Path outDir = FileOutputFormat.getOutputPath(job);
+Path outDir = getOutputPath(job);
 if (outDir == null) {
 throw new InvalidJobConfException("Output directory not set.");
 }
@@ -148,13 +148,13 @@ public class HadoopDruidIndexerConfig
 // the Map<> intermediary

 if (argSpec.containsKey("spec")) {
-return HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+return JSON_MAPPER.convertValue(
 argSpec,
 HadoopDruidIndexerConfig.class
 );
 }
 return new HadoopDruidIndexerConfig(
-HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+JSON_MAPPER.convertValue(
 argSpec,
 HadoopIngestionSpec.class
 )

@@ -166,7 +166,7 @@ public class HadoopDruidIndexerConfig
 {
 try {
 return fromMap(
-HadoopDruidIndexerConfig.JSON_MAPPER.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+JSON_MAPPER.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
 );
 }
 catch (IOException e) {

@@ -180,7 +180,7 @@ public class HadoopDruidIndexerConfig
 // This is a map to try and prevent dependency screwbally-ness
 try {
 return fromMap(
-HadoopDruidIndexerConfig.JSON_MAPPER.readValue(str, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+JSON_MAPPER.readValue(str, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
 );
 }
 catch (IOException e) {

@@ -197,7 +197,7 @@ public class HadoopDruidIndexerConfig
 Reader reader = new InputStreamReader(fs.open(pt), StandardCharsets.UTF_8);

 return fromMap(
-HadoopDruidIndexerConfig.JSON_MAPPER.readValue(reader, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+JSON_MAPPER.readValue(reader, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
 );
 }
 catch (Exception e) {

@@ -207,7 +207,7 @@ public class HadoopDruidIndexerConfig
 public static HadoopDruidIndexerConfig fromConfiguration(Configuration conf)
 {
-final HadoopDruidIndexerConfig retVal = fromString(conf.get(HadoopDruidIndexerConfig.CONFIG_PROPERTY));
+final HadoopDruidIndexerConfig retVal = fromString(conf.get(CONFIG_PROPERTY));
 retVal.verify();
 return retVal;
 }

@@ -590,7 +590,7 @@ public class HadoopDruidIndexerConfig
 Configuration conf = job.getConfiguration();

 try {
-conf.set(HadoopDruidIndexerConfig.CONFIG_PROPERTY, HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(this));
+conf.set(CONFIG_PROPERTY, JSON_MAPPER.writeValueAsString(this));
 }
 catch (IOException e) {
 throw new RuntimeException(e);
@@ -188,9 +188,9 @@ public class HadoopTuningConfig implements TuningConfig
 this.ignoreInvalidRows = Configs.valueOrDefault(ignoreInvalidRows, false);
 this.maxParseExceptions = Configs.valueOrDefault(
 maxParseExceptions,
-this.ignoreInvalidRows ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS : 0
+this.ignoreInvalidRows ? DEFAULT_MAX_PARSE_EXCEPTIONS : 0
 );
-this.logParseExceptions = Configs.valueOrDefault(logParseExceptions, TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS);
+this.logParseExceptions = Configs.valueOrDefault(logParseExceptions, DEFAULT_LOG_PARSE_EXCEPTIONS);
 this.useYarnRMJobStatusFallback = Configs.valueOrDefault(useYarnRMJobStatusFallback, true);

 if (awaitSegmentAvailabilityTimeoutMillis == null || awaitSegmentAvailabilityTimeoutMillis < 0) {
@@ -413,9 +413,9 @@ public class JobHelper
 authenticate();

 HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(indexerSchema);
-final Configuration configuration = JobHelper.injectSystemProperties(new Configuration(), config);
+final Configuration configuration = injectSystemProperties(new Configuration(), config);
 config.addJobProperties(configuration);
-JobHelper.injectDruidProperties(configuration, config);
+injectDruidProperties(configuration, config);
 if (!config.getSchema().getTuningConfig().isLeaveIntermediate()) {
 if (jobSucceeded || config.getSchema().getTuningConfig().isCleanupOnFailure()) {
 Path workingPath = config.makeIntermediatePath();

@@ -619,7 +619,7 @@ public class JobHelper
 prependFSIfNullScheme(fs, basePath),
 StringUtils.format(
 "./%s.%d",
-dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP),
+dataSegmentPusher.makeIndexPathName(segmentTemplate, INDEX_ZIP),
 taskAttemptID.getId()
 )
 );

@@ -640,9 +640,9 @@ public class JobHelper
 ) throws IOException
 {
 HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(indexerSchema);
-final Configuration configuration = JobHelper.injectSystemProperties(new Configuration(), config);
+final Configuration configuration = injectSystemProperties(new Configuration(), config);
 config.addJobProperties(configuration);
-JobHelper.injectDruidProperties(configuration, config);
+injectDruidProperties(configuration, config);
 for (DataSegmentAndIndexZipFilePath segmentAndIndexZipFilePath : segmentAndIndexZipFilePaths) {
 Path tmpPath = new Path(segmentAndIndexZipFilePath.getTmpIndexZipFilePath());
 Path finalIndexZipFilePath = new Path(segmentAndIndexZipFilePath.getFinalIndexZipFilePath());
@@ -160,7 +160,7 @@ public class DatasourceInputFormat extends InputFormat<NullWritable, InputRow>
 {
 // to avoid globbing which needs input path should be hadoop-compatible (':' is not acceptable in path, etc.)
 List<FileStatus> statusList = new ArrayList<>();
-for (Path path : FileInputFormat.getInputPaths(job)) {
+for (Path path : getInputPaths(job)) {
 // load spec in segment points specifically zip file itself
 statusList.add(path.getFileSystem(job).getFileStatus(path));
 }
@@ -812,7 +812,7 @@ public abstract class AbstractBatchIndexTask extends AbstractTask
 throw new ISE("Unspecified interval[%s] in granularitySpec[%s]", interval, granularitySpec);
 }

-version = AbstractBatchIndexTask.findVersion(versions, interval);
+version = findVersion(versions, interval);
 if (version == null) {
 throw new ISE("Cannot find a version for interval[%s]", interval);
 }

@@ -820,7 +820,7 @@ public abstract class AbstractBatchIndexTask extends AbstractTask
 // We don't have explicit intervals. We can use the segment granularity to figure out what
 // interval we need, but we might not have already locked it.
 interval = granularitySpec.getSegmentGranularity().bucket(timestamp);
-final String existingLockVersion = AbstractBatchIndexTask.findVersion(versions, interval);
+final String existingLockVersion = findVersion(versions, interval);
 if (existingLockVersion == null) {
 if (ingestionSpec.getTuningConfig() instanceof ParallelIndexTuningConfig) {
 final int maxAllowedLockCount = ((ParallelIndexTuningConfig) ingestionSpec.getTuningConfig())
@@ -535,7 +535,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
 String hadoopJobIdFile = getHadoopJobIdFileName();

 try {
-ClassLoader loader = HadoopTask.buildClassLoader(
+ClassLoader loader = buildClassLoader(
 getHadoopDependencyCoordinates(),
 taskConfig.getDefaultHadoopCoordinates()
 );

@@ -616,7 +616,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
 final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
 try {
-ClassLoader loader = HadoopTask.buildClassLoader(
+ClassLoader loader = buildClassLoader(
 getHadoopDependencyCoordinates(),
 taskConfig.getDefaultHadoopCoordinates()
 );
@@ -81,7 +81,6 @@ import org.apache.druid.segment.incremental.RowIngestionMeters;
 import org.apache.druid.segment.indexing.BatchIOConfig;
 import org.apache.druid.segment.indexing.DataSchema;
 import org.apache.druid.segment.indexing.IngestionSpec;
-import org.apache.druid.segment.indexing.TuningConfig;
 import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
 import org.apache.druid.segment.indexing.granularity.GranularitySpec;
 import org.apache.druid.segment.realtime.ChatHandler;

@@ -742,7 +741,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler, Pe
 Comparators.intervalsByStartThenEnd()
 );
 final Granularity queryGranularity = granularitySpec.getQueryGranularity();
-try (final CloseableIterator<InputRow> inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+try (final CloseableIterator<InputRow> inputRowIterator = inputSourceReader(
 tmpDir,
 ingestionSchema.getDataSchema(),
 inputSource,

@@ -1070,7 +1069,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler, Pe
 throw new IAE("Cannot use parser and inputSource together. Try using inputFormat instead of parser.");
 }

-IngestionMode ingestionMode = AbstractTask.computeBatchIngestionMode(ioConfig);
+IngestionMode ingestionMode = computeBatchIngestionMode(ioConfig);

 if (ingestionMode == IngestionMode.REPLACE && dataSchema.getGranularitySpec()
 .inputIntervals()

@@ -1133,8 +1132,8 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler, Pe
 {
 this.inputSource = inputSource;
 this.inputFormat = inputFormat;
-this.appendToExisting = appendToExisting == null ? BatchIOConfig.DEFAULT_APPEND_EXISTING : appendToExisting;
-this.dropExisting = dropExisting == null ? BatchIOConfig.DEFAULT_DROP_EXISTING : dropExisting;
+this.appendToExisting = appendToExisting == null ? DEFAULT_APPEND_EXISTING : appendToExisting;
+this.dropExisting = dropExisting == null ? DEFAULT_DROP_EXISTING : dropExisting;
 }

 @Nullable

@@ -1367,7 +1366,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler, Pe
 )
 {
 this.appendableIndexSpec = appendableIndexSpec == null ? DEFAULT_APPENDABLE_INDEX : appendableIndexSpec;
-this.maxRowsInMemory = maxRowsInMemory == null ? TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY_BATCH : maxRowsInMemory;
+this.maxRowsInMemory = maxRowsInMemory == null ? DEFAULT_MAX_ROWS_IN_MEMORY_BATCH : maxRowsInMemory;
 // initializing this to 0, it will be lazily initialized to a value
 // @see #getMaxBytesInMemoryOrDefault()
 this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;

@@ -1395,14 +1394,14 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler, Pe
 this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0 : Math.min(1, maxSavedParseExceptions);
 } else {
 this.maxParseExceptions = maxParseExceptions == null
-? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
+? DEFAULT_MAX_PARSE_EXCEPTIONS
 : maxParseExceptions;
 this.maxSavedParseExceptions = maxSavedParseExceptions == null
-? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
+? DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
 : maxSavedParseExceptions;
 }
 this.logParseExceptions = logParseExceptions == null
-? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
+? DEFAULT_LOG_PARSE_EXCEPTIONS
 : logParseExceptions;
 if (awaitSegmentAvailabilityTimeoutMillis == null || awaitSegmentAvailabilityTimeoutMillis < 0) {
 this.awaitSegmentAvailabilityTimeoutMillis = DEFAULT_AWAIT_SEGMENT_AVAILABILITY_TIMEOUT_MILLIS;
@@ -138,12 +138,12 @@ public class IndexTaskUtils
 )
 {
 final ServiceMetricEvent.Builder metricBuilder = new ServiceMetricEvent.Builder();
-IndexTaskUtils.setTaskDimensions(metricBuilder, task);
+setTaskDimensions(metricBuilder, task);

 if (publishResult.isSuccess()) {
 toolbox.getEmitter().emit(metricBuilder.setMetric("segment/txn/success", 1));
 for (DataSegment segment : publishResult.getSegments()) {
-IndexTaskUtils.setSegmentDimensions(metricBuilder, segment);
+setSegmentDimensions(metricBuilder, segment);
 toolbox.getEmitter().emit(metricBuilder.setMetric("segment/added/bytes", segment.getSize()));
 toolbox.getEmitter().emit(SegmentMetadataEvent.create(segment, DateTimes.nowUtc()));
 }
@@ -54,7 +54,7 @@ public class LegacySinglePhaseSubTask extends SinglePhaseSubTask
 @Override
 public String getType()
 {
-return SinglePhaseSubTask.OLD_TYPE_NAME;
+return OLD_TYPE_NAME;
 }

 }
@@ -37,7 +37,6 @@ import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
 import org.apache.druid.indexing.common.TaskToolbox;
 import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
 import org.apache.druid.indexing.common.task.TaskResource;
 import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.java.util.common.parsers.CloseableIterator;

@@ -182,7 +181,7 @@ public class PartialDimensionCardinalityTask extends PerfectRollupWorkerTask
 tuningConfig.getMaxSavedParseExceptions()
 );
 try (
-final CloseableIterator<InputRow> inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+final CloseableIterator<InputRow> inputRowIterator = inputSourceReader(
 toolbox.getIndexingTmpDir(),
 dataSchema,
 inputSource,
@@ -40,7 +40,6 @@ import org.apache.druid.indexer.partitions.DimensionRangePartitionsSpec;
 import org.apache.druid.indexing.common.TaskToolbox;
 import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
 import org.apache.druid.indexing.common.task.TaskResource;
 import org.apache.druid.indexing.common.task.batch.parallel.distribution.StringDistribution;
 import org.apache.druid.indexing.common.task.batch.parallel.distribution.StringSketch;

@@ -229,7 +228,7 @@ public class PartialDimensionDistributionTask extends PerfectRollupWorkerTask
 tuningConfig.getMaxSavedParseExceptions()
 );
 try (
-final CloseableIterator<InputRow> inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+final CloseableIterator<InputRow> inputRowIterator = inputSourceReader(
 toolbox.getIndexingTmpDir(),
 dataSchema,
 inputSource,
@@ -32,7 +32,6 @@ import org.apache.druid.indexing.common.TaskToolbox;
 import org.apache.druid.indexing.common.actions.LockListAction;
 import org.apache.druid.indexing.common.actions.SurrogateAction;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
 import org.apache.druid.indexing.common.task.TaskResource;
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;

@@ -293,7 +292,7 @@ abstract class PartialSegmentMergeTask<S extends ShardSpec> extends PerfectRollu
 getDataSource(),
 interval,
 Preconditions.checkNotNull(
-AbstractBatchIndexTask.findVersion(intervalToVersion, interval),
+findVersion(intervalToVersion, interval),
 "version for interval[%s]",
 interval
 ),
@@ -38,8 +38,6 @@ import org.apache.druid.indexing.common.TaskToolbox;
 import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
 import org.apache.druid.indexing.common.stats.TaskRealtimeMetricsMonitor;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
-import org.apache.druid.indexing.common.task.AbstractTask;
 import org.apache.druid.indexing.common.task.BatchAppenderators;
 import org.apache.druid.indexing.common.task.IndexTask;
 import org.apache.druid.indexing.common.task.IndexTaskUtils;

@@ -165,7 +163,7 @@ public class SinglePhaseSubTask extends AbstractBatchSubtask implements ChatHand
 taskResource,
 ingestionSchema.getDataSchema().getDataSource(),
 context,
-AbstractTask.computeBatchIngestionMode(ingestionSchema.getIOConfig()),
+computeBatchIngestionMode(ingestionSchema.getIOConfig()),
 supervisorTaskId
 );

@@ -409,7 +407,7 @@ public class SinglePhaseSubTask extends AbstractBatchSubtask implements ChatHand
 boolean exceptionOccurred = false;
 try (
 final BatchAppenderatorDriver driver = BatchAppenderators.newDriver(appenderator, toolbox, segmentAllocator);
-final CloseableIterator<InputRow> inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+final CloseableIterator<InputRow> inputRowIterator = inputSourceReader(
 tmpDir,
 dataSchema,
 inputSource,
@@ -397,7 +397,7 @@ public class DruidInputSource extends AbstractInputSource implements SplittableI
 coordinatorClient,
 dataSource,
 interval,
-splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
+splitHintSpec == null ? DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
 )
 );
 } else {

@@ -417,7 +417,7 @@ public class DruidInputSource extends AbstractInputSource implements SplittableI
 coordinatorClient,
 dataSource,
 interval,
-splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
+splitHintSpec == null ? DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
 )
 );
 } else {
@@ -54,7 +54,7 @@ public class QuotableWhiteSpaceSplitter implements Iterable<String>
 if (inQuotes) {
 return false;
 }
-return CharMatcher.breakingWhitespace().matches(c);
+return breakingWhitespace().matches(c);
 }
 }
 ).omitEmptyStrings().split(string).iterator();
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
 import org.apache.druid.segment.IndexSpec;
 import org.apache.druid.segment.incremental.AppendableIndexSpec;
-import org.apache.druid.segment.indexing.TuningConfig;
 import org.apache.druid.segment.realtime.appenderator.AppenderatorConfig;
 import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
 import org.joda.time.Period;

@@ -131,14 +130,14 @@ public abstract class SeekableStreamIndexTaskTuningConfig implements Appenderato
 this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0 : Math.min(1, maxSavedParseExceptions);
 } else {
 this.maxParseExceptions = maxParseExceptions == null
-? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
+? DEFAULT_MAX_PARSE_EXCEPTIONS
 : maxParseExceptions;
 this.maxSavedParseExceptions = maxSavedParseExceptions == null
-? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
+? DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
 : maxSavedParseExceptions;
 }
 this.logParseExceptions = logParseExceptions == null
-? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
+? DEFAULT_LOG_PARSE_EXCEPTIONS
 : logParseExceptions;
 this.numPersistThreads = numPersistThreads == null ?
 DEFAULT_NUM_PERSIST_THREADS : Math.max(numPersistThreads, DEFAULT_NUM_PERSIST_THREADS);
@@ -66,7 +66,7 @@ public class CliCustomNodeRole extends ServerRunnable
 public static final String SERVICE_NAME = "custom-node-role";
 public static final int PORT = 9301;
 public static final int TLS_PORT = 9501;
-public static final NodeRole NODE_ROLE = new NodeRole(CliCustomNodeRole.SERVICE_NAME);
+public static final NodeRole NODE_ROLE = new NodeRole(SERVICE_NAME);

 public CliCustomNodeRole()
 {

@@ -85,9 +85,9 @@ public class CliCustomNodeRole extends ServerRunnable
 return ImmutableList.of(
 binder -> {
 LOG.info("starting up custom node role");
-binder.bindConstant().annotatedWith(Names.named("serviceName")).to(CliCustomNodeRole.SERVICE_NAME);
-binder.bindConstant().annotatedWith(Names.named("servicePort")).to(CliCustomNodeRole.PORT);
-binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(CliCustomNodeRole.TLS_PORT);
+binder.bindConstant().annotatedWith(Names.named("serviceName")).to(SERVICE_NAME);
+binder.bindConstant().annotatedWith(Names.named("servicePort")).to(PORT);
+binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(TLS_PORT);

 binder.bind(JettyServerInitializer.class).to(CustomJettyServiceInitializer.class).in(LazySingleton.class);
 LifecycleModule.register(binder, Server.class);
@@ -65,7 +65,7 @@ public class CliCustomNodeRole extends ServerRunnable
 public static final String SERVICE_NAME = "custom-node-role";
 public static final int PORT = 9301;
 public static final int TLS_PORT = 9501;
-public static final NodeRole NODE_ROLE = new NodeRole(CliCustomNodeRole.SERVICE_NAME);
+public static final NodeRole NODE_ROLE = new NodeRole(SERVICE_NAME);

 public CliCustomNodeRole()
 {

@@ -84,9 +84,9 @@ public class CliCustomNodeRole extends ServerRunnable
 return ImmutableList.of(
 binder -> {
 LOG.info("starting up");
-binder.bindConstant().annotatedWith(Names.named("serviceName")).to(CliCustomNodeRole.SERVICE_NAME);
-binder.bindConstant().annotatedWith(Names.named("servicePort")).to(CliCustomNodeRole.PORT);
-binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(CliCustomNodeRole.TLS_PORT);
+binder.bindConstant().annotatedWith(Names.named("serviceName")).to(SERVICE_NAME);
+binder.bindConstant().annotatedWith(Names.named("servicePort")).to(PORT);
+binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(TLS_PORT);

 binder.bind(JettyServerInitializer.class).to(CustomJettyServiceInitializer.class).in(LazySingleton.class);
 LifecycleModule.register(binder, Server.class);
@@ -73,7 +73,7 @@ public class AvroSchemaRegistryEventSerializer extends AvroEventSerializer
 try {
 RetryUtils.retry(
 () -> {
-schemaId = client.register(topic, AvroEventSerializer.SCHEMA);
+schemaId = client.register(topic, SCHEMA);
 fromRegistry = client.getById(schemaId);
 return 0;
 },
@@ -68,7 +68,7 @@ public class ProtobufSchemaRegistryEventSerializer extends ProtobufEventSerializ
 try {
 RetryUtils.retry(
 () -> {
-schemaId = client.register(topic, new ProtobufSchema(ProtobufEventSerializer.SCHEMA.newMessageBuilder("Wikipedia").getDescriptorForType()));
+schemaId = client.register(topic, new ProtobufSchema(SCHEMA.newMessageBuilder("Wikipedia").getDescriptorForType()));
 return 0;
 },
 (e) -> true,
@@ -101,7 +101,7 @@ public class PolygonBound extends RectangularBound
 public static PolygonBound from(float[] abscissa, float[] ordinate)
 {
-return PolygonBound.from(abscissa, ordinate, 0);
+return from(abscissa, ordinate, 0);
 }

 @JsonProperty
@@ -129,7 +129,7 @@ public class NullHandling
 */
 public static boolean useThreeValueLogic()
 {
-return NullHandling.sqlCompatible() &&
+return sqlCompatible() &&
 INSTANCE.isUseThreeValueLogicForNativeFilters() &&
 ExpressionProcessing.useStrictBooleans();
 }

@@ -268,7 +268,7 @@ public class NullHandling
 */
 public static boolean mustCombineNullAndEmptyInDictionary(final Indexed<ByteBuffer> dictionaryUtf8)
 {
-return NullHandling.replaceWithDefault()
+return replaceWithDefault()
 && dictionaryUtf8.size() >= 2
 && isNullOrEquivalent(dictionaryUtf8.get(0))
 && isNullOrEquivalent(dictionaryUtf8.get(1));

@@ -285,7 +285,7 @@ public class NullHandling
 */
 public static boolean mustReplaceFirstValueWithNullInDictionary(final Indexed<ByteBuffer> dictionaryUtf8)
 {
-if (NullHandling.replaceWithDefault() && dictionaryUtf8.size() >= 1) {
+if (replaceWithDefault() && dictionaryUtf8.size() >= 1) {
 final ByteBuffer firstValue = dictionaryUtf8.get(0);
 return firstValue != null && firstValue.remaining() == 0;
 }
@@ -73,7 +73,7 @@ public class FutureUtils
 public static <T> T getUnchecked(final ListenableFuture<T> future, final boolean cancelIfInterrupted)
 {
 try {
-return FutureUtils.get(future, cancelIfInterrupted);
+return get(future, cancelIfInterrupted);
 }
 catch (InterruptedException e) {
 Thread.currentThread().interrupt();

@@ -103,7 +103,7 @@ public class FutureUtils
 /**
 * Like {@link Futures#transform}, but works better with lambdas due to not having overloads.
 *
-* One can write {@code FutureUtils.transform(future, v -> ...)} instead of
+* One can write {@code transform(future, v -> ...)} instead of
 * {@code Futures.transform(future, (Function<? super T, ?>) v -> ...)}
 */
 public static <T, R> ListenableFuture<R> transform(final ListenableFuture<T> future, final Function<T, R> fn)

@@ -115,7 +115,7 @@ public class FutureUtils
 * Like {@link Futures#transformAsync(ListenableFuture, AsyncFunction, java.util.concurrent.Executor)}, but works better with lambdas due to not having
 * overloads.
 *
-* One can write {@code FutureUtils.transformAsync(future, v -> ...)} instead of
+* One can write {@code transformAsync(future, v -> ...)} instead of
 * {@code Futures.transform(future, (Function<? super T, ?>) v -> ...)}
 */
 public static <T, R> ListenableFuture<R> transformAsync(final ListenableFuture<T> future, final AsyncFunction<T, R> fn)
@@ -100,7 +100,7 @@ public class IdUtils
 public static String getRandomIdWithPrefix(String prefix)
 {
-return UNDERSCORE_JOINER.join(prefix, IdUtils.getRandomId());
+return UNDERSCORE_JOINER.join(prefix, getRandomId());
 }

 public static String newTaskId(String typeName, String dataSource, @Nullable Interval interval)
@@ -139,7 +139,7 @@ public class DimensionsSpec
 }

 for (SpatialDimensionSchema spatialSchema : spatialDims) {
-DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema);
+DimensionSchema newSchema = convertSpatialSchema(spatialSchema);
 this.dimensions.add(newSchema);
 dimensionSchemaMap.put(newSchema.getName(), newSchema);
 }
@@ -36,7 +36,7 @@ public class DoubleDimensionSchema extends DimensionSchema
 @Override
 public String getTypeName()
 {
-return DimensionSchema.DOUBLE_TYPE_NAME;
+return DOUBLE_TYPE_NAME;
 }

 @Override
@@ -39,7 +39,7 @@ public class FloatDimensionSchema extends DimensionSchema
 @Override
 public String getTypeName()
 {
-return DimensionSchema.FLOAT_TYPE_NAME;
+return FLOAT_TYPE_NAME;
 }

 @Override
@@ -40,7 +40,7 @@ public class LongDimensionSchema extends DimensionSchema
 @Override
 public String getTypeName()
 {
-return DimensionSchema.LONG_TYPE_NAME;
+return LONG_TYPE_NAME;
 }

 @Override
@@ -59,7 +59,7 @@ public class NewSpatialDimensionSchema extends DimensionSchema
 @Override
 public String getTypeName()
 {
-return DimensionSchema.SPATIAL_TYPE_NAME;
+return SPATIAL_TYPE_NAME;
 }

 @Override
@@ -54,7 +54,7 @@ public class StringDimensionSchema extends DimensionSchema
 @Override
 public String getTypeName()
 {
-return DimensionSchema.STRING_TYPE_NAME;
+return STRING_TYPE_NAME;
 }

 @Override
@@ -209,7 +209,7 @@ public class ImmutableConciseSet
 }
 }
 ConciseSet newSet = new ConciseSet(words, false);
-return ImmutableConciseSet.newImmutableFromMutable(newSet);
+return newImmutableFromMutable(newSet);
 }

 IntList retVal = new IntList();
@@ -240,12 +240,12 @@ public class Frame
 );
 }

-return Frame.wrap(dstBuffer);
+return wrap(dstBuffer);
 } else {
 // Copy first, then decompress.
 final byte[] compressedFrame = new byte[compressedFrameLength];
 memory.getByteArray(frameStart, compressedFrame, 0, compressedFrameLength);
-return Frame.wrap(LZ4_DECOMPRESSOR.decompress(compressedFrame, uncompressedFrameLength));
+return wrap(LZ4_DECOMPRESSOR.decompress(compressedFrame, uncompressedFrameLength));
 }
 }
@@ -149,7 +149,7 @@ public class RowKeyReader
 newSignature.add(columnName, columnType);
 }

-return RowKeyReader.create(newSignature.build());
+return create(newSignature.build());
 }

 /**
@@ -243,7 +243,7 @@ public class OutputChannel
 */
 public OutputChannel readOnly()
 {
-return OutputChannel.readOnly(readableChannelSupplier, partitionNumber);
+return readOnly(readableChannelSupplier, partitionNumber);
 }

 /**
@@ -252,7 +252,7 @@ public class DurableStorageUtils
 if (elements.size() < 2) {
 return false;
 }
-if (!DurableStorageUtils.QUERY_RESULTS_DIR.equals(elements.get(0))) {
+if (!QUERY_RESULTS_DIR.equals(elements.get(0))) {
 return false;
 }
 return knownTasks.contains(elements.get(1));
@@ -318,7 +318,7 @@ public class FrameWriterUtils
 return buf;
 }
 } else {
-return FrameWriterUtils.getUtf8ByteBufferFromString(selector.lookupName(dictionaryId));
+return getUtf8ByteBufferFromString(selector.lookupName(dictionaryId));
 }
 }
@@ -394,7 +394,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 convertToDenseStorage();
 }

-other = HyperLogLogCollector.makeCollector(tmpBuffer);
+other = makeCollector(tmpBuffer);
 }

 final ByteBuffer otherBuffer = other.storageBuffer;

@@ -574,7 +574,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 final ByteBuffer denseStorageBuffer;
 if (storageBuffer.remaining() != getNumBytesForDenseStorage()) {
-HyperLogLogCollector denseCollector = HyperLogLogCollector.makeCollector(storageBuffer.duplicate());
+HyperLogLogCollector denseCollector = makeCollector(storageBuffer.duplicate());
 denseCollector.convertToDenseStorage();
 denseStorageBuffer = denseCollector.storageBuffer;
 } else {

@@ -582,7 +582,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 }

 if (otherBuffer.remaining() != getNumBytesForDenseStorage()) {
-HyperLogLogCollector otherCollector = HyperLogLogCollector.makeCollector(otherBuffer.duplicate());
+HyperLogLogCollector otherCollector = makeCollector(otherBuffer.duplicate());
 otherCollector.convertToDenseStorage();
 otherBuffer = otherCollector.storageBuffer;
 }
@@ -60,7 +60,7 @@ public class TaskLocation
 public static TaskLocation unknown()
 {
-return TaskLocation.UNKNOWN;
+return UNKNOWN;
 }

 @JsonCreator
@@ -125,7 +125,7 @@ public class DimensionRangePartitionsSpec implements DimensionBasedPartitionsSpe
 return resolvedMaxRowPerSegment; // NOTE: This returns the *resolved* value
 }

-@JsonProperty(PartitionsSpec.MAX_ROWS_PER_SEGMENT)
+@JsonProperty(MAX_ROWS_PER_SEGMENT)
 protected Integer getMaxRowsPerSegmentForJson()
 {
 return maxRowsPerSegment;
@@ -46,7 +46,7 @@ public class DynamicPartitionsSpec implements PartitionsSpec
 @JsonCreator
 public DynamicPartitionsSpec(
-@JsonProperty(PartitionsSpec.MAX_ROWS_PER_SEGMENT) @Nullable Integer maxRowsPerSegment,
+@JsonProperty(MAX_ROWS_PER_SEGMENT) @Nullable Integer maxRowsPerSegment,
 @JsonProperty("maxTotalRows") @Nullable Long maxTotalRows
 )
 {
@@ -93,7 +93,7 @@ public class HashedPartitionsSpec implements DimensionBasedPartitionsSpec
 // Supply default for targetRowsPerSegment if needed
 if (target.getValue() == null) {
 //noinspection VariableNotUsedInsideIf (false positive for this.numShards)
-this.maxRowsPerSegment = (this.numShards == null ? PartitionsSpec.DEFAULT_MAX_ROWS_PER_SEGMENT : null);
+this.maxRowsPerSegment = (this.numShards == null ? DEFAULT_MAX_ROWS_PER_SEGMENT : null);
 } else {
 this.maxRowsPerSegment = target.getValue();
 }
@@ -111,7 +111,7 @@ public class Either<L, R>
 public <T> Either<L, T> map(final Function<R, T> fn)
 {
 if (isValue()) {
-return Either.value(fn.apply(value));
+return value(fn.apply(value));
 } else {
 // Safe because the value is never going to be returned.
 //noinspection unchecked
@@ -30,7 +30,7 @@ public class HumanReadableBytes
 public HumanReadableBytes(String bytes)
 {
-this.bytes = HumanReadableBytes.parse(bytes);
+this.bytes = parse(bytes);
 }

 public HumanReadableBytes(long bytes)
@@ -54,7 +54,7 @@ public class StringUtils
 private static final Base64.Decoder BASE64_DECODER = Base64.getDecoder();

 // should be used only for estimation
-// returns the same result with StringUtils.fromUtf8(value).length for valid string values
+// returns the same result with fromUtf8(value).length for valid string values
 // does not check validity of format and returns over-estimated result for invalid string (see UT)
 public static int estimatedBinaryLengthAsUTF8(String value)
 {

@@ -257,7 +257,7 @@ public class StringUtils
 */
 public static String fromUtf8(final ByteBuffer buffer)
 {
-return StringUtils.fromUtf8(buffer, buffer.remaining());
+return fromUtf8(buffer, buffer.remaining());
 }

 /**

@@ -273,7 +273,7 @@ public class StringUtils
 if (buffer == null) {
 return null;
 }
-return StringUtils.fromUtf8(buffer, buffer.remaining());
+return fromUtf8(buffer, buffer.remaining());
 }

 /**

@@ -390,7 +390,7 @@ public class StringUtils
 if (s == null) {
 return null;
 } else {
-return StringUtils.replaceChar(s, '%', "%%");
+return replaceChar(s, '%', "%%");
 }
 }

@@ -422,7 +422,7 @@ public class StringUtils
 }

 try {
-return StringUtils.replace(URLEncoder.encode(s, "UTF-8"), "+", "%20");
+return replace(URLEncoder.encode(s, "UTF-8"), "+", "%20");
 }
 catch (UnsupportedEncodingException e) {
 throw new RuntimeException(e);

@@ -792,7 +792,7 @@ public class StringUtils
 } else {
 // Shorten firstValue to what could fit in maxBytes as UTF-8.
 final byte[] bytes = new byte[maxBytes];
-final int len = StringUtils.toUtf8WithLimit(s, ByteBuffer.wrap(bytes));
+final int len = toUtf8WithLimit(s, ByteBuffer.wrap(bytes));
 return new String(bytes, 0, len, StandardCharsets.UTF_8);
 }
 }

@@ -831,6 +831,6 @@ public class StringUtils
 */
 public static String escapeSql(String str)
 {
-return str == null ? null : StringUtils.replace(str, "'", "''");
+return str == null ? null : replace(str, "'", "''");
 }
 }
@@ -68,7 +68,7 @@ public class Granularities
 public static Granularity nullToAll(Granularity granularity)
 {
-return granularity == null ? Granularities.ALL : granularity;
+return granularity == null ? ALL : granularity;
 }

 /**
@@ -176,7 +176,7 @@ public enum GranularityType
 */
 public static boolean isStandard(Granularity granularity)
 {
-final GranularityType[] values = GranularityType.values();
+final GranularityType[] values = values();
 for (GranularityType value : values) {
 if (value.getDefaultGranularity().equals(granularity)) {
 return true;

@@ -205,42 +205,42 @@ public enum GranularityType
 switch (index) {
 case 0:
-return GranularityType.YEAR;
+return YEAR;
 case 1:
 if (vals[index] == 3) {
-return GranularityType.QUARTER;
+return QUARTER;
 } else if (vals[index] == 1) {
-return GranularityType.MONTH;
+return MONTH;
 }
 break;
 case 2:
-return GranularityType.WEEK;
+return WEEK;
 case 3:
-return GranularityType.DAY;
+return DAY;
 case 4:
 if (vals[index] == 8) {
-return GranularityType.EIGHT_HOUR;
+return EIGHT_HOUR;
 } else if (vals[index] == 6) {
-return GranularityType.SIX_HOUR;
+return SIX_HOUR;
 } else if (vals[index] == 1) {
-return GranularityType.HOUR;
+return HOUR;
 }
 break;
 case 5:
 if (vals[index] == 30) {
-return GranularityType.THIRTY_MINUTE;
+return THIRTY_MINUTE;
 } else if (vals[index] == 15) {
-return GranularityType.FIFTEEN_MINUTE;
+return FIFTEEN_MINUTE;
 } else if (vals[index] == 10) {
-return GranularityType.TEN_MINUTE;
+return TEN_MINUTE;
 } else if (vals[index] == 5) {
-return GranularityType.FIVE_MINUTE;
+return FIVE_MINUTE;
 } else if (vals[index] == 1) {
-return GranularityType.MINUTE;
+return MINUTE;
 }
 break;
 case 6:
-return GranularityType.SECOND;
+return SECOND;
 default:
 break;
 }
@@ -56,7 +56,7 @@ public class Sequences
 public static <T> Sequence<T> concat(Iterable<Sequence<T>> sequences)
 {
-return concat(Sequences.simple(sequences));
+return concat(simple(sequences));
 }

 public static <T> Sequence<T> concat(Sequence<? extends Sequence<T>> sequences)
@@ -103,7 +103,7 @@ public class JSONPathFieldSpec
 @JsonCreator
 public static JSONPathFieldSpec fromString(String name)
 {
-return JSONPathFieldSpec.createRootField(name);
+return createRootField(name);
 }

 public static JSONPathFieldSpec createNestedField(String name, String expr)
@@ -76,7 +76,7 @@ public class ParserUtils
 if (input.contains(listDelimiter)) {
 return StreamSupport.stream(listSplitter.split(input).spliterator(), false)
 .map(NullHandling::emptyToNullIfNeeded)
-.map(value -> tryParseNumbers ? ParserUtils.tryParseStringAsNumber(value) : value)
+.map(value -> tryParseNumbers ? tryParseStringAsNumber(value) : value)
 .collect(Collectors.toList());
 } else {
 return tryParseNumbers ?
@@ -524,7 +524,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter

     private boolean needsToShutdown()
     {
-      boolean needsToShutdown = Thread.interrupted() || shuttingDown;
+      boolean needsToShutdown = interrupted() || shuttingDown;
       if (needsToShutdown) {
         Object lastBatch = concurrentBatch.getAndSet(null);
         if (lastBatch instanceof Batch) {
@@ -722,7 +722,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter
       final long backoffCheckDelayMillis = config.getMinHttpTimeoutMillis() / 5;

       try {
-        Thread.sleep(backoffCheckDelayMillis);
+        sleep(backoffCheckDelayMillis);
       }
       catch (InterruptedException ignored) {
         return;

@@ -46,7 +46,7 @@ public class AlertBuilder extends ServiceEventBuilder<AlertEvent>

   public static AlertBuilder create(String descriptionFormat, Object... objects)
   {
-    return AlertBuilder.createEmittable(null, descriptionFormat, objects);
+    return createEmittable(null, descriptionFormat, objects);
   }

   public static AlertBuilder createEmittable(ServiceEmitter emitter, String descriptionFormat, Object... objects)

@@ -82,11 +82,11 @@ public class Evals
     if (val instanceof Boolean) {
       return (Boolean) val;
     } else if (val instanceof String) {
-      return Evals.asBoolean((String) val);
+      return asBoolean((String) val);
     } else if (val instanceof Long) {
-      return Evals.asBoolean((Long) val);
+      return asBoolean((Long) val);
     } else if (val instanceof Number) {
-      return Evals.asBoolean(((Number) val).doubleValue());
+      return asBoolean(((Number) val).doubleValue());
     }
     return false;
   }

@@ -166,14 +166,14 @@ public abstract class ExprEval<T>
       Object[] array = new Object[val.size()];
       int i = 0;
       for (Object o : val) {
-        array[i++] = o != null ? ExprEval.ofType(ExpressionType.LONG, o).value() : null;
+        array[i++] = o != null ? ofType(ExpressionType.LONG, o).value() : null;
       }
       return new NonnullPair<>(ExpressionType.LONG_ARRAY, array);
     } else if (coercedType == Float.class || coercedType == Double.class) {
       Object[] array = new Object[val.size()];
       int i = 0;
       for (Object o : val) {
-        array[i++] = o != null ? ExprEval.ofType(ExpressionType.DOUBLE, o).value() : null;
+        array[i++] = o != null ? ofType(ExpressionType.DOUBLE, o).value() : null;
       }
       return new NonnullPair<>(ExpressionType.DOUBLE_ARRAY, array);
     } else if (coercedType == Object.class) {
@@ -184,7 +184,7 @@ public abstract class ExprEval<T>
       ExpressionType elementType = null;
       for (Object o : val) {
         if (o != null) {
-          ExprEval<?> eval = ExprEval.bestEffortOf(o);
+          ExprEval<?> eval = bestEffortOf(o);
           elementType = ExpressionTypeConversion.leastRestrictiveType(elementType, eval.type());
           evals[i++] = eval;
         } else {
@@ -196,7 +196,7 @@ public abstract class ExprEval<T>
         if (eval != null) {
           array[i++] = eval.castTo(elementType).value();
         } else {
-          array[i++] = ExprEval.ofType(elementType, null).value();
+          array[i++] = ofType(elementType, null).value();
         }
       }
       ExpressionType arrayType = elementType == null
@@ -208,7 +208,7 @@ public abstract class ExprEval<T>
       Object[] array = new Object[val.size()];
       int i = 0;
       for (Object o : val) {
-        array[i++] = o != null ? ExprEval.ofType(ExpressionType.STRING, o).value() : null;
+        array[i++] = o != null ? ofType(ExpressionType.STRING, o).value() : null;
       }
       return new NonnullPair<>(ExpressionType.STRING_ARRAY, array);
     }
@@ -362,11 +362,11 @@ public abstract class ExprEval<T>
   {
     switch (type.getType()) {
       case DOUBLE:
-        return ExprEval.of(Evals.asDouble(value));
+        return of(Evals.asDouble(value));
       case LONG:
         return ofLongBoolean(value);
       case STRING:
-        return ExprEval.of(String.valueOf(value));
+        return of(String.valueOf(value));
       default:
         throw new Types.InvalidCastBooleanException(type);
     }
@@ -559,7 +559,7 @@ public abstract class ExprEval<T>
           return ofLongBoolean((Boolean) value);
         }
         if (value instanceof String) {
-          return ofLong(ExprEval.computeNumber((String) value));
+          return ofLong(computeNumber((String) value));
         }
         return ofLong(null);
       case DOUBLE:
@@ -570,7 +570,7 @@ public abstract class ExprEval<T>
          return ofDouble(Evals.asDouble((Boolean) value));
        }
        if (value instanceof String) {
-          return ofDouble(ExprEval.computeNumber((String) value));
+          return ofDouble(computeNumber((String) value));
        }
        return ofDouble(null);
      case COMPLEX:
@@ -605,7 +605,7 @@ public abstract class ExprEval<T>
         Object[] array = new Object[theList.size()];
         int i = 0;
         for (Object o : theList) {
-          array[i++] = ExprEval.ofType(elementType, o).value();
+          array[i++] = ofType(elementType, o).value();
         }
         return ofArray(type, array);
       }
@@ -615,7 +615,7 @@ public abstract class ExprEval<T>
         Object[] array = new Object[inputArray.length];
         int i = 0;
         for (Object o : inputArray) {
-          array[i++] = ExprEval.ofType(elementType, o).value();
+          array[i++] = ofType(elementType, o).value();
         }
         return ofArray(type, array);
       }
@@ -890,20 +890,20 @@ public abstract class ExprEval<T>
         return this;
       case LONG:
         if (value == null) {
-          return ExprEval.ofLong(null);
+          return ofLong(null);
         } else {
-          return ExprEval.of(asLong());
+          return of(asLong());
         }
       case STRING:
-        return ExprEval.of(asString());
+        return of(asString());
       case ARRAY:
         switch (castTo.getElementType().getType()) {
           case DOUBLE:
-            return ExprEval.ofDoubleArray(asArray());
+            return ofDoubleArray(asArray());
           case LONG:
-            return ExprEval.ofLongArray(value == null ? null : new Object[]{value.longValue()});
+            return ofLongArray(value == null ? null : new Object[]{value.longValue()});
           case STRING:
-            return ExprEval.ofStringArray(value == null ? null : new Object[]{value.toString()});
+            return ofStringArray(value == null ? null : new Object[]{value.toString()});
           default:
             ExpressionType elementType = (ExpressionType) castTo.getElementType();
             return new ArrayExprEval(castTo, new Object[]{castTo(elementType).value()});
@@ -971,25 +971,25 @@ public abstract class ExprEval<T>
     switch (castTo.getType()) {
       case DOUBLE:
         if (value == null) {
-          return ExprEval.ofDouble(null);
+          return ofDouble(null);
         } else {
-          return ExprEval.of(asDouble());
+          return of(asDouble());
         }
       case LONG:
         return this;
       case STRING:
-        return ExprEval.of(asString());
+        return of(asString());
       case ARRAY:
         if (value == null) {
           return new ArrayExprEval(castTo, null);
         }
         switch (castTo.getElementType().getType()) {
           case DOUBLE:
-            return ExprEval.ofDoubleArray(new Object[]{value.doubleValue()});
+            return ofDoubleArray(new Object[]{value.doubleValue()});
           case LONG:
-            return ExprEval.ofLongArray(asArray());
+            return ofLongArray(asArray());
           case STRING:
-            return ExprEval.ofStringArray(new Object[]{value.toString()});
+            return ofStringArray(new Object[]{value.toString()});
           default:
             ExpressionType elementType = (ExpressionType) castTo.getElementType();
             return new ArrayExprEval(castTo, new Object[]{castTo(elementType).value()});
@@ -1150,10 +1150,10 @@ public abstract class ExprEval<T>
   {
     switch (castTo.getType()) {
       case DOUBLE:
-        return ExprEval.ofDouble(computeNumber());
+        return ofDouble(computeNumber());

       case LONG:
-        return ExprEval.ofLong(computeNumber());
+        return ofLong(computeNumber());

       case STRING:
         return this;
@@ -1165,12 +1165,12 @@ public abstract class ExprEval<T>
         ExprType type = castTo.getElementType().getType();
         if (type == ExprType.DOUBLE) {
           final Number number = computeNumber();
-          return ExprEval.ofDoubleArray(new Object[]{number == null ? null : number.doubleValue()});
+          return ofDoubleArray(new Object[]{number == null ? null : number.doubleValue()});
         } else if (type == ExprType.LONG) {
           final Number number = computeNumber();
-          return ExprEval.ofLongArray(new Object[]{number == null ? null : number.longValue()});
+          return ofLongArray(new Object[]{number == null ? null : number.longValue()});
         } else if (type == ExprType.STRING) {
-          return ExprEval.ofStringArray(new Object[]{value});
+          return ofStringArray(new Object[]{value});
         }

         ExpressionType elementType = (ExpressionType) castTo.getElementType();
@@ -1335,7 +1335,7 @@ public abstract class ExprEval<T>
       if (castTo.isArray()) {
         return new ArrayExprEval(castTo, null);
       }
-      return ExprEval.ofType(castTo, null);
+      return ofType(castTo, null);
     }
     if (type().equals(castTo)) {
       return this;
@@ -1343,26 +1343,26 @@ public abstract class ExprEval<T>
     switch (castTo.getType()) {
       case STRING:
         if (value.length == 1) {
-          return ExprEval.of(asString());
+          return of(asString());
         }
         break;
       case LONG:
         if (value.length == 1) {
-          return isNumericNull() ? ExprEval.ofLong(null) : ExprEval.ofLong(asLong());
+          return isNumericNull() ? ofLong(null) : ofLong(asLong());
         }
         break;
       case DOUBLE:
         if (value.length == 1) {
-          return isNumericNull() ? ExprEval.ofDouble(null) : ExprEval.ofDouble(asDouble());
+          return isNumericNull() ? ofDouble(null) : ofDouble(asDouble());
         }
         break;
       case ARRAY:
         ExpressionType elementType = (ExpressionType) castTo.getElementType();
         Object[] cast = new Object[value.length];
         for (int i = 0; i < value.length; i++) {
-          cast[i] = ExprEval.ofType(elementType(), value[i]).castTo(elementType).value();
+          cast[i] = ofType(elementType(), value[i]).castTo(elementType).value();
         }
-        return ExprEval.ofArray(castTo, cast);
+        return ofArray(castTo, cast);
       case COMPLEX:
         if (ExpressionType.NESTED_DATA.equals(castTo)) {
           return new NestedDataExprEval(value);
@@ -1536,7 +1536,7 @@ public abstract class ExprEval<T>
         } else if (val instanceof Boolean) {
           number = Evals.asLong((Boolean) val);
         } else if (val instanceof String) {
-          number = ExprEval.computeNumber((String) val);
+          number = computeNumber((String) val);
         }
       }
     }
@@ -1546,7 +1546,7 @@ public abstract class ExprEval<T>
     public Object[] asArray()
     {
       Object val = StructuredData.unwrap(value);
-      ExprEval maybeArray = ExprEval.bestEffortOf(val);
+      ExprEval maybeArray = bestEffortOf(val);
       if (maybeArray.type().isPrimitive() || maybeArray.isArray()) {
         return maybeArray.asArray();
       }
@@ -1561,7 +1561,7 @@ public abstract class ExprEval<T>
       }

       Object val = StructuredData.unwrap(value);
-      ExprEval bestEffortOf = ExprEval.bestEffortOf(val);
+      ExprEval bestEffortOf = bestEffortOf(val);

       if (bestEffortOf.type().isPrimitive() || bestEffortOf.type().isArray()) {
         return bestEffortOf.castTo(castTo);

@@ -159,7 +159,7 @@ public class ExprMacroTable
           macro.name(),
           args.size() == 1
           ? args.get(0).stringify()
-          : Expr.ARG_JOINER.join(args.stream().map(Expr::stringify).iterator())
+          : ARG_JOINER.join(args.stream().map(Expr::stringify).iterator())
       );
     }

@@ -49,6 +49,6 @@ public enum ExprType implements TypeDescriptor
   @Override
   public boolean isArray()
   {
-    return this == ExprType.ARRAY;
+    return this == ARRAY;
   }
 }

@@ -54,7 +54,7 @@ public class ExpressionType extends BaseTypeSignature<ExprType>
   public static final ExpressionType DOUBLE_ARRAY =
       new ExpressionType(ExprType.ARRAY, null, DOUBLE);
   public static final ExpressionType NESTED_DATA =
-      ExpressionType.fromColumnTypeStrict(ColumnType.NESTED_DATA);
+      fromColumnTypeStrict(ColumnType.NESTED_DATA);
   public static final ExpressionType UNKNOWN_COMPLEX =
       new ExpressionType(ExprType.COMPLEX, null, null);

@@ -241,7 +241,7 @@ public class ExpressionTypeConversion
   @Nullable
   public static ExpressionType integerMathFunction(@Nullable ExpressionType type, @Nullable ExpressionType other)
   {
-    final ExpressionType functionType = ExpressionTypeConversion.function(type, other);
+    final ExpressionType functionType = function(type, other);
     // any number is long
     return Types.isNumeric(functionType) ? ExpressionType.LONG : functionType;
   }

@@ -110,7 +110,7 @@ public class Parser
    */
   public static Supplier<Expr> lazyParse(@Nullable String in, ExprMacroTable macroTable)
   {
-    return Suppliers.memoize(() -> in == null ? null : Parser.parse(in, macroTable));
+    return Suppliers.memoize(() -> in == null ? null : parse(in, macroTable));
   }

   /**

@@ -313,7 +313,7 @@ public class JoinDataSource implements DataSource
     DimFilter joinBaseFilter = analysis.getJoinBaseTableFilter().orElse(null);

     for (final PreJoinableClause clause : analysis.getPreJoinableClauses()) {
-      current = JoinDataSource.create(
+      current = create(
           current,
           clause.getDataSource(),
           clause.getPrefix(),

@@ -219,7 +219,7 @@ public class AggregatorUtil
   )
   {

-    List<PostAggregator> condensedPostAggs = AggregatorUtil.pruneDependentPostAgg(postAggList, metric);
+    List<PostAggregator> condensedPostAggs = pruneDependentPostAgg(postAggList, metric);
     // calculate dependent aggregators for these postAgg
     Set<String> dependencySet = new HashSet<>();
     dependencySet.add(metric);
@@ -427,7 +427,7 @@ public class AggregatorUtil
     return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
                      .put(aggregatorType)
                      .put(fieldNameBytes)
-                     .put(AggregatorUtil.STRING_SEPARATOR)
+                     .put(STRING_SEPARATOR)
                      .put(expressionBytes)
                      .array();
   });

@@ -119,7 +119,7 @@ public class StringAnyAggregatorFactory extends AggregatorFactory
   @Override
   public Comparator getComparator()
   {
-    return StringAnyAggregatorFactory.VALUE_COMPARATOR;
+    return VALUE_COMPARATOR;
   }

   @Override

@@ -150,7 +150,7 @@ public class StringFirstAggregatorFactory extends AggregatorFactory
     this.fieldName = fieldName;
     this.timeColumn = timeColumn == null ? ColumnHolder.TIME_COLUMN_NAME : timeColumn;
     this.maxStringBytes = maxStringBytes == null
-                          ? StringFirstAggregatorFactory.DEFAULT_MAX_STRING_SIZE
+                          ? DEFAULT_MAX_STRING_SIZE
                           : maxStringBytes;
   }

@@ -265,7 +265,7 @@ public class ArithmeticPostAggregator implements PostAggregator
     private static final Map<String, Ops> LOOKUP_MAP = new HashMap<>();

     static {
-      for (Ops op : Ops.values()) {
+      for (Ops op : values()) {
         LOOKUP_MAP.put(op.getFn(), op);
       }
     }

@@ -69,7 +69,7 @@ public class DataSourceMetadataQuery extends BaseQuery<Result<DataSourceMetadata
   @Override
   public String getType()
   {
-    return Query.DATASOURCE_METADATA;
+    return DATASOURCE_METADATA;
   }

   @Override

@@ -789,7 +789,7 @@ public class InDimFilter extends AbstractOptimizableDimFilter implements Filter
      */
     public static ValuesSet of(@Nullable final String value)
     {
-      final ValuesSet retVal = ValuesSet.create();
+      final ValuesSet retVal = create();
       retVal.add(NullHandling.emptyToNullIfNeeded(value));
       return retVal;
     }

@@ -1196,7 +1196,7 @@ public class GroupByQuery extends BaseQuery<ResultRow>

     public Builder queryId(String queryId)
     {
-      context = BaseQuery.computeOverriddenContext(context, ImmutableMap.of(BaseQuery.QUERY_ID, queryId));
+      context = computeOverriddenContext(context, ImmutableMap.of(QUERY_ID, queryId));
       return this;
     }

@@ -502,7 +502,7 @@ public class GroupingEngine
     closer.register(bufferHolder);
     try {
       final String fudgeTimestampString = NullHandling.emptyToNullIfNeeded(
-          query.context().getString(GroupingEngine.CTX_KEY_FUDGE_TIMESTAMP)
+          query.context().getString(CTX_KEY_FUDGE_TIMESTAMP)
       );

       final DateTime fudgeTimestamp = fudgeTimestampString == null

@@ -91,7 +91,7 @@ public final class ResultRow
   public static ResultRow fromLegacyRow(Row row, final GroupByQuery query)
   {
     // Can't be sure if we'll get result rows with or without postaggregations, so be safe.
-    final ResultRow resultRow = ResultRow.create(query.getResultRowSizeWithPostAggregators());
+    final ResultRow resultRow = create(query.getResultRowSizeWithPostAggregators());

     int i = 0;
     if (query.getResultRowHasTimestamp()) {

@@ -449,7 +449,7 @@ public class RowBasedGrouperHelper
     final ColumnSelectorFactory columnSelectorFactory =
         query.getVirtualColumns()
              .wrap(
-                 RowBasedGrouperHelper.createResultRowBasedColumnSelectorFactory(
+                 createResultRowBasedColumnSelectorFactory(
                      subquery,
                      rowSupplier,
                      RowSignature.Finalization.UNKNOWN

@@ -55,7 +55,7 @@ public class OrderByColumnSpec
     private static final Map<String, Direction> STUPID_ENUM_MAP;
     static {
       final ImmutableMap.Builder<String, Direction> bob = ImmutableMap.builder();
-      for (Direction direction : Direction.values()) {
+      for (Direction direction : values()) {
         bob.put(direction.name(), direction);
       }
       STUPID_ENUM_MAP = bob.build();
@@ -75,7 +75,7 @@ public class OrderByColumnSpec
       Direction direction = STUPID_ENUM_MAP.get(upperName);

       if (direction == null) {
-        for (Direction dir : Direction.values()) {
+        for (Direction dir : values()) {
           if (dir.name().startsWith(upperName)) {
             if (direction != null) {
               throw new ISE("Ambiguous directions[%s] and [%s]", direction, dir);

@@ -151,7 +151,7 @@ public class ColumnAnalysis
     }

     if (isError() && rhs.isError()) {
-      return errorMessage.equals(rhs.getErrorMessage()) ? this : ColumnAnalysis.error("multiple_errors");
+      return errorMessage.equals(rhs.getErrorMessage()) ? this : error("multiple_errors");
     } else if (isError()) {
       return this;
     } else if (rhs.isError()) {
@@ -159,13 +159,13 @@ public class ColumnAnalysis
     }

     if (!Objects.equals(type, rhs.getType())) {
-      return ColumnAnalysis.error(
+      return error(
           StringUtils.format("cannot_merge_diff_types: [%s] and [%s]", type, rhs.getType())
       );
     }

     if (!Objects.equals(typeSignature, rhs.getTypeSignature())) {
-      return ColumnAnalysis.error(
+      return error(
           StringUtils.format(
               "cannot_merge_diff_types: [%s] and [%s]",
               typeSignature.asTypeString(),

@@ -164,7 +164,7 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
   @Override
   public String getType()
   {
-    return Query.SEGMENT_METADATA;
+    return SEGMENT_METADATA;
   }

   @JsonProperty

@@ -186,7 +186,7 @@ public class WindowOperatorQuery extends BaseQuery<RowsAndColumns>
   @Override
   public String getType()
   {
-    return Query.WINDOW_OPERATOR;
+    return WINDOW_OPERATOR;
   }


@@ -98,7 +98,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.LEXICOGRAPHIC_NAME;
+      return LEXICOGRAPHIC_NAME;
     }

     @Override
@@ -286,7 +286,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.ALPHANUMERIC_NAME;
+      return ALPHANUMERIC_NAME;
     }

     @Override
@@ -340,7 +340,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.STRLEN_NAME;
+      return STRLEN_NAME;
     }

     @Override
@@ -404,7 +404,7 @@ public class StringComparators

       if (bd1 == null && bd2 == null) {
         // both Strings are unparseable, just compare lexicographically to have a well-defined ordering
-        return StringComparators.LEXICOGRAPHIC.compare(o1, o2);
+        return LEXICOGRAPHIC.compare(o1, o2);
       }

       if (bd1 == null) {
@@ -417,7 +417,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.NUMERIC_NAME;
+      return NUMERIC_NAME;
     }

     @Override
@@ -469,7 +469,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.VERSION_NAME;
+      return VERSION_NAME;
     }

     @Override
@@ -520,7 +520,7 @@ public class StringComparators
     @Override
     public String toString()
     {
-      return StringComparators.NATURAL_NAME;
+      return NATURAL_NAME;
     }

     @Override

@@ -205,7 +205,7 @@ public class MapOfColumnsRowsAndColumns implements RowsAndColumns

     public MapOfColumnsRowsAndColumns build()
     {
-      return MapOfColumnsRowsAndColumns.fromMap(cols);
+      return fromMap(cols);
     }
   }

@@ -94,7 +94,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
   @Override
   public String getType()
   {
-    return Query.SEARCH;
+    return SEARCH;
   }

   @Override

@@ -89,7 +89,7 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
   @Override
   public String getType()
   {
-    return Query.TIME_BOUNDARY;
+    return TIME_BOUNDARY;
   }

   @JsonProperty

@@ -120,7 +120,7 @@ public class TimeseriesQuery extends BaseQuery<Result<TimeseriesResultValue>>
   @Override
   public String getType()
   {
-    return Query.TIMESERIES;
+    return TIMESERIES;
   }

   @JsonProperty

@@ -81,28 +81,28 @@ public class PooledTopNAlgorithm
   @VisibleForTesting
   static void setSpecializeGeneric1AggPooledTopN(boolean value)
   {
-    PooledTopNAlgorithm.SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = value;
+    SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = value;
     computeSpecializedScanAndAggregateImplementations();
   }

   @VisibleForTesting
   static void setSpecializeGeneric2AggPooledTopN(boolean value)
   {
-    PooledTopNAlgorithm.SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = value;
+    SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = value;
     computeSpecializedScanAndAggregateImplementations();
   }

   @VisibleForTesting
   static void setSpecializeHistorical1SimpleDoubleAggPooledTopN(boolean value)
   {
-    PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
+    SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
     computeSpecializedScanAndAggregateImplementations();
   }

   @VisibleForTesting
   static void setSpecializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN(boolean value)
   {
-    PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
+    SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
     computeSpecializedScanAndAggregateImplementations();
   }

@@ -99,7 +99,7 @@ public class AutoTypeColumnIndexer implements DimensionIndexer<StructuredData, S
       ExprEval<?> eval = ExprEval.bestEffortOf(fieldValue);
       FieldIndexer fieldIndexer = fieldIndexers.get(fieldName);
       if (fieldIndexer == null) {
-        estimatedFieldKeySize += StructuredDataProcessor.estimateStringSize(fieldName);
+        estimatedFieldKeySize += estimateStringSize(fieldName);
         fieldIndexer = new FieldIndexer(globalDictionary);
         fieldIndexers.put(fieldName, fieldIndexer);
       }
@@ -120,7 +120,7 @@ public class AutoTypeColumnIndexer implements DimensionIndexer<StructuredData, S
       final String fieldName = NestedPathFinder.toNormalizedJsonPath(fieldPath);
       FieldIndexer fieldIndexer = fieldIndexers.get(fieldName);
       if (fieldIndexer == null) {
-        estimatedFieldKeySize += StructuredDataProcessor.estimateStringSize(fieldName);
+        estimatedFieldKeySize += estimateStringSize(fieldName);
         fieldIndexer = new FieldIndexer(globalDictionary);
         fieldIndexers.put(fieldName, fieldIndexer);
       }

@@ -327,7 +327,7 @@ public final class DimensionHandlerUtils
     } else if (valObj instanceof Boolean) {
       return Evals.asLong((Boolean) valObj);
     } else if (valObj instanceof String) {
-      Long ret = DimensionHandlerUtils.getExactLongFromDecimalString((String) valObj);
+      Long ret = getExactLongFromDecimalString((String) valObj);
       if (reportParseExceptions && ret == null) {
         final String message;
         if (objectKey != null) {
@@ -518,7 +518,7 @@ public final class DimensionHandlerUtils
       case ARRAY:
         return coerceToObjectArrayWithElementCoercionFunction(
            obj,
-            x -> DimensionHandlerUtils.convertObjectToType(x, type.getElementType(), reportParseExceptions, fieldName)
+            x -> convertObjectToType(x, type.getElementType(), reportParseExceptions, fieldName)
        );
      case COMPLEX:
        // Can't coerce complex objects, and we shouldn't need to. If in future selectors behave weirdly, or we need to

@@ -1232,7 +1232,7 @@ public class IndexMergerV9 implements IndexMerger

     List<File> tempDirs = new ArrayList<>();

-    if (maxColumnsToMerge == IndexMerger.UNLIMITED_MAX_COLUMNS_TO_MERGE) {
+    if (maxColumnsToMerge == UNLIMITED_MAX_COLUMNS_TO_MERGE) {
       return merge(
           indexes,
           rollup,

Some files were not shown because too many files have changed in this diff.
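Every removed/added line pair in this diff follows the same pattern: a static member that was referenced through its own class name is rewritten without the qualifier, because the reference already sits inside that class (or a nested scope of it). A minimal, hypothetical sketch of the pattern follows; the class and member names here are illustrative only and do not come from the Druid sources. Behavior is identical before and after, the qualifier is simply redundant.

// QualifierExample.java -- hypothetical illustration, not part of the Druid codebase.
public class QualifierExample
{
  private static final String GREETING = "hello";

  public static String greet(String name)
  {
    // Before cleanup this would read: QualifierExample.GREETING + ", " + name
    return GREETING + ", " + name;
  }

  public static String greetLoudly(String name)
  {
    // Unqualified call to a static method of the same class; previously QualifierExample.greet(name).
    return greet(name) + "!";
  }

  public static void main(String[] args)
  {
    System.out.println(greetLoudly("druid"));
  }
}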