Clean codebase from empty statements (#37822)
* Remove empty statements

  There are a couple of instances of undocumented empty statements all across the code base. While they are mostly harmless, they make the code hard to read and they are potentially error-prone. This change removes most of these instances and marks blocks that are empty by intention as such.

* Change test, slightly more verbose but less confusing
parent 49073dd2f6
commit b4b4cd6ebd
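To make the hazard concrete, a hedged illustration of why a stray empty statement is more than cosmetic; the class below is hypothetical and is not code from this commit:

// Hypothetical example (not from this commit): the trailing ';' after the
// for-header is an empty statement, so the braced block is NOT the loop body.
public class EmptyStatementDemo {
    public static void main(String[] args) {
        int[] values = {1, 2, 3};
        int sum = 0;
        for (int v : values); // <- empty statement silently ends the loop here
        {
            sum += 1;         // free-standing block: runs once, after the loop
        }
        System.out.println(sum); // prints 1, not 3
    }
}

Most occurrences fixed below are the benign variant, an extra ';' after a closing brace or after '{}', which compiles to a no-op but reads as if code were missing.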
@@ -37,7 +37,7 @@ public class MultiTermVectorsRequest implements ToXContentObject, Validatable {
      * Constructs an empty MultiTermVectorsRequest
      * After that use {@code add} method to add individual {@code TermVectorsRequest} to it.
      */
-    public MultiTermVectorsRequest() {};
+    public MultiTermVectorsRequest() {}

     /**
      * Constructs a MultiTermVectorsRequest from the given document ids
@@ -38,7 +38,7 @@ public class DetectionRule implements ToXContentObject {
     public static final ParseField CONDITIONS_FIELD = new ParseField("conditions");

     public static final ObjectParser<Builder, Void> PARSER =
-            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new);;
+            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new);

     static {
         PARSER.declareStringArray(Builder::setActions, ACTIONS_FIELD);
@@ -297,7 +297,7 @@ public class ObjectParserTests extends ESTestCase {

     enum TestEnum {
         FOO, BAR
-    };
+    }

     public void testParseEnumFromString() throws IOException {
         class TestStruct {
@@ -69,7 +69,7 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory {
                     + KeepTypesMode.EXCLUDE + "] but was [" + modeString + "].");
             }
         }
-    };
+    }

     KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
@@ -37,7 +37,7 @@ public interface MetricDetail extends ToXContentObject, NamedWriteable {
         innerToXContent(builder, params);
         builder.endObject();
         return builder.endObject();
-    };
+    }

     default String getMetricName() {
         return getWriteableName();
@@ -60,7 +60,7 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
             public void execute() {
                 scriptBody.accept(getCtx());
             }
-        };;
+        };
         when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory);
         AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(mockScript("")));
         RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc);
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.settings;

 import org.apache.logging.log4j.Level;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchParseException;

@@ -44,6 +43,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.internal.io.IOUtils;

 import java.io.IOException;
 import java.io.InputStream;

@@ -60,12 +60,12 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.ListIterator;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 import java.util.function.Predicate;
@@ -20,6 +20,7 @@
 package org.elasticsearch.common.util;

 import com.carrotsearch.hppc.ObjectArrayList;
+
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefArray;
 import org.apache.lucene.util.BytesRefBuilder;

@@ -301,8 +302,8 @@ public class CollectionUtils {
         public int size() {
             return in.size();
         }
-    };
+    }

     public static void sort(final BytesRefArray bytes, final int[] indices) {
         sort(new BytesRefBuilder(), new BytesRefBuilder(), bytes, indices);
     }
@@ -36,7 +36,7 @@ public class PreBuiltCacheFactory {
      * ELASTICSEARCH Exactly one version per elasticsearch version is stored. Useful if you change an analyzer between elasticsearch
      * releases, when the lucene version does not change
      */
-    public enum CachingStrategy { ONE, LUCENE, ELASTICSEARCH };
+    public enum CachingStrategy { ONE, LUCENE, ELASTICSEARCH }

     public interface PreBuiltCache<T> {

@@ -132,7 +132,7 @@ public final class ScoreScriptUtils {
             this.originLat = origin.lat();
             this.originLon = origin.lon();
             this.offset = DistanceUnit.DEFAULT.parse(offsetStr, DistanceUnit.DEFAULT);
-            this.scaling = 0.5 * Math.pow(scale, 2.0) / Math.log(decay);;
+            this.scaling = 0.5 * Math.pow(scale, 2.0) / Math.log(decay);
         }

         public double decayGeoGauss(GeoPoint docValue) {
@@ -78,7 +78,7 @@ public class EarlyTerminatingCollector extends FilterCollector {
                     }
                 }
                 super.collect(doc);
-            };
+            }
         };
     }

@@ -93,7 +93,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
         this.setNestedPath(template.getNestedPath());
         if (template.getNestedSort() != null) {
             this.setNestedSort(template.getNestedSort());
-        };
+        }
     }

     /**
@@ -76,7 +76,7 @@ public class RestoreSnapshotRequestTests extends AbstractWireSerializingTestCase
         int count = randomInt(3) + 1;

         for (int i = 0; i < count; ++i) {
-            indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));;
+            indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));
         }
         instance.indexSettings(indexSettings);
     }
@@ -83,5 +83,5 @@ public class GroupShardsIteratorTests extends ESTestCase {
             shardRouting = ShardRoutingHelper.moveToStarted(shardRouting);
         }
         return shardRouting;
-    };
+    }
 }
@@ -34,8 +34,10 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;

@@ -260,8 +262,10 @@ public class TaskBatcherTests extends TaskExecutorTests {
         Map<Integer, TestListener> tasks = new HashMap<>();
         final int numOfTasks = randomInt(10);
         final CountDownLatch latch = new CountDownLatch(numOfTasks);
+        Set<Integer> usedKeys = new HashSet<>(numOfTasks);
         for (int i = 0; i < numOfTasks; i++) {
-            while (null != tasks.put(randomInt(1024), new TestListener() {
+            int key = randomValueOtherThanMany(k -> usedKeys.contains(k), () -> randomInt(1024));
+            tasks.put(key, new TestListener() {
                 @Override
                 public void processed(String source) {
                     latch.countDown();

@@ -271,8 +275,10 @@ public class TaskBatcherTests extends TaskExecutorTests {
                 public void onFailure(String source, Exception e) {
                     fail(ExceptionsHelper.detailedMessage(e));
                 }
-            })) ;
+            });
+            usedKeys.add(key);
         }
+        assert usedKeys.size() == numOfTasks;

         TestExecutor<Integer> executor = taskList -> {
             assertThat(taskList.size(), equalTo(tasks.size()));
@@ -45,7 +45,7 @@ public class GeoDistanceTests extends ESTestCase {
         GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.ARC);
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             geoDistance.writeTo(out);
-            try (StreamInput in = out.bytes().streamInput()) {;
+            try (StreamInput in = out.bytes().streamInput()) {
                 GeoDistance copy = GeoDistance.readFromStream(in);
                 assertEquals(copy.toString() + " vs. " + geoDistance.toString(), copy, geoDistance);
             }
@@ -19,9 +19,8 @@

 package org.elasticsearch.common.geo.builders;

-import org.locationtech.jts.geom.Coordinate;
-
 import org.elasticsearch.common.unit.DistanceUnit;
+import org.locationtech.jts.geom.Coordinate;

 import java.io.IOException;

@@ -59,7 +58,7 @@ public class CircleBuilderTests extends AbstractShapeBuilderTestCase<CircleBuild
             DistanceUnit newRandom = unit;
             while (newRandom == unit) {
                 newRandom = randomFrom(DistanceUnit.values());
-            };
+            }
             unit = newRandom;
         }
         return mutation.radius(radius, unit);
@@ -570,7 +570,7 @@ public class BytesStreamsTests extends ESTestCase {
     }

     public void testReadWriteGeoPoint() throws IOException {
-        try (BytesStreamOutput out = new BytesStreamOutput()) {;
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
             out.writeGenericValue(geoPoint);
             StreamInput wrap = out.bytes().streamInput();
@@ -67,7 +67,7 @@ public class AsyncIOProcessorTests extends ESTestCase {
                 } catch (Exception ex) {
                     throw new RuntimeException(ex);
                 }
-            };
+            }
         };
         thread[i].start();
     }

@@ -120,7 +120,7 @@ public class AsyncIOProcessorTests extends ESTestCase {
                 } catch (Exception ex) {
                     throw new RuntimeException(ex);
                 }
-            };
+            }
         };
         thread[i].start();
     }
@@ -392,7 +392,7 @@ public class LiveVersionMapTests extends ESTestCase {
     public void testPruneTombstonesWhileLocked() throws InterruptedException, IOException {
         LiveVersionMap map = new LiveVersionMap();
         BytesRef uid = uid("1");
-        ;

         try (Releasable ignore = map.acquireLock(uid)) {
             map.putDeleteUnderLock(uid, new DeleteVersionValue(0, 0, 0, 0));
             map.beforeRefresh(); // refresh otherwise we won't prune since it's tracked by the current map
@@ -51,8 +51,8 @@ import java.util.List;
 import java.util.Map;

 import static java.util.Collections.singletonList;
-import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static java.util.Collections.singletonMap;
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;

@@ -81,7 +81,7 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
             });
         }

-    };
+    }

     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
@@ -861,6 +861,6 @@ public class FunctionScoreTests extends ESTestCase {
         @Override
         protected int doHashCode() {
             return 0;
-        };
+        }
     }
 }
@@ -56,10 +56,10 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
             newIndices[i] = randomSimpleString(random(), 1, 30);
         }
         request.indices(newIndices);
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.indices()[i]);
         }
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.getSearchRequest().indices()[i]);
         }
     }
@@ -49,10 +49,10 @@ public class UpdateByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
             newIndices[i] = randomSimpleString(random(), 1, 30);
         }
         request.indices(newIndices);
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.indices()[i]);
         }
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.getSearchRequest().indices()[i]);
         }
     }
@@ -28,9 +28,6 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.metrics.Percentile;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket;
 import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket;
 import org.elasticsearch.test.InternalAggregationTestCase;

 import java.io.IOException;

@@ -192,7 +189,6 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
         String name = instance.getName();
         double[] percents = extractPercents(instance);
         double[] percentiles = extractPercentiles(instance);
-        ;
         DocValueFormat formatter = instance.formatter();
         List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
         Map<String, Object> metaData = instance.getMetaData();
@@ -152,7 +152,7 @@ public class NestedSortBuilderTests extends ESTestCase {
             @Override
             protected QueryBuilder doRewrite(org.elasticsearch.index.query.QueryRewriteContext queryShardContext) throws IOException {
                 return new MatchAllQueryBuilder();
-            };
+            }
         };
         // test that filter gets rewritten
         NestedSortBuilder original = new NestedSortBuilder("path").setFilter(filterThatRewrites);
@@ -40,7 +40,7 @@ public class EqualsHashCodeTestUtils {
     */
    public interface CopyFunction<T> {
        T copy(T t) throws IOException;
-   };
+   }

    /**
    * A function that creates a copy of its input argument that is different from its

@@ -48,7 +48,7 @@ public class EqualsHashCodeTestUtils {
    */
    public interface MutateFunction<T> {
        T mutate(T t) throws IOException;
-   };
+   }

    /**
    * Perform common equality and hashCode checks on the input object
@@ -324,7 +324,7 @@ public final class FrozenEngine extends ReadOnlyEngine {
             @Override
             public LeafReader wrap(LeafReader reader) {
                 return new LazyLeafReader(reader);
-            };
+            }
         });
         this.delegate = reader;
         this.engine = engine;
@@ -45,14 +45,14 @@ public class DataCounts implements ToXContentObject, Writeable {
     public static final String MISSING_FIELD_COUNT_STR = "missing_field_count";
     public static final String OUT_OF_ORDER_TIME_COUNT_STR = "out_of_order_timestamp_count";
     public static final String EMPTY_BUCKET_COUNT_STR = "empty_bucket_count";
     public static final String SPARSE_BUCKET_COUNT_STR = "sparse_bucket_count";
     public static final String BUCKET_COUNT_STR = "bucket_count";
     public static final String EARLIEST_RECORD_TIME_STR = "earliest_record_timestamp";
     public static final String LATEST_RECORD_TIME_STR = "latest_record_timestamp";
     public static final String LAST_DATA_TIME_STR = "last_data_time";
     public static final String LATEST_EMPTY_BUCKET_TIME_STR = "latest_empty_bucket_timestamp";
     public static final String LATEST_SPARSE_BUCKET_TIME_STR = "latest_sparse_bucket_timestamp";
-

     public static final ParseField PROCESSED_RECORD_COUNT = new ParseField(PROCESSED_RECORD_COUNT_STR);
     public static final ParseField PROCESSED_FIELD_COUNT = new ParseField(PROCESSED_FIELD_COUNT_STR);
     public static final ParseField INPUT_BYTES = new ParseField(INPUT_BYTES_STR);

@@ -68,7 +68,7 @@ public class DataCounts implements ToXContentObject, Writeable {
     public static final ParseField LATEST_RECORD_TIME = new ParseField(LATEST_RECORD_TIME_STR);
     public static final ParseField LAST_DATA_TIME = new ParseField(LAST_DATA_TIME_STR);
     public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField(LATEST_EMPTY_BUCKET_TIME_STR);
     public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField(LATEST_SPARSE_BUCKET_TIME_STR);

     public static final ParseField TYPE = new ParseField("data_counts");

@@ -99,7 +99,7 @@ public class DataCounts implements ToXContentObject, Writeable {
             p -> TimeUtils.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), LATEST_EMPTY_BUCKET_TIME, ValueType.VALUE);
         PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
             p -> TimeUtils.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), LATEST_SPARSE_BUCKET_TIME, ValueType.VALUE);
-        PARSER.declareLong((t, u) -> {;}, INPUT_RECORD_COUNT);
+        PARSER.declareLong((t, u) -> {/* intentionally empty */}, INPUT_RECORD_COUNT);
     }

     public static String documentId(String jobId) {

@@ -131,7 +131,7 @@ public class DataCounts implements ToXContentObject, Writeable {
     public DataCounts(String jobId, long processedRecordCount, long processedFieldCount, long inputBytes,
                       long inputFieldCount, long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount,
                       long emptyBucketCount, long sparseBucketCount, long bucketCount,
                       Date earliestRecordTimeStamp, Date latestRecordTimeStamp, Date lastDataTimeStamp,
                       Date latestEmptyBucketTimeStamp, Date latestSparseBucketTimeStamp) {
         this.jobId = jobId;
         this.processedRecordCount = processedRecordCount;

@@ -195,7 +195,7 @@ public class DataCounts implements ToXContentObject, Writeable {
         if (in.readBoolean()) {
             lastDataTimeStamp = new Date(in.readVLong());
         }
         if (in.readBoolean()) {
             latestEmptyBucketTimeStamp = new Date(in.readVLong());
         }
         if (in.readBoolean()) {

@@ -346,9 +346,9 @@ public class DataCounts implements ToXContentObject, Writeable {
     public void incrementEmptyBucketCount(long additional) {
         emptyBucketCount += additional;
     }
-

     /**
     * The number of buckets with few records compared to the overall counts.
     * Used to measure general data fitness and/or configuration problems (bucket span).
     *
     * @return Number of sparse buckets processed by this job {@code long}

@@ -360,7 +360,7 @@ public class DataCounts implements ToXContentObject, Writeable {
     public void incrementSparseBucketCount(long additional) {
         sparseBucketCount += additional;
     }
-

     /**
     * The number of buckets overall.
     *

@@ -443,7 +443,7 @@ public class DataCounts implements ToXContentObject, Writeable {
     public void setLatestEmptyBucketTimeStamp(Date latestEmptyBucketTimeStamp) {
         this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp;
     }
-

     public void updateLatestEmptyBucketTimeStamp(Date latestEmptyBucketTimeStamp) {
         if (latestEmptyBucketTimeStamp != null &&
             (this.latestEmptyBucketTimeStamp == null ||

@@ -472,7 +472,7 @@ public class DataCounts implements ToXContentObject, Writeable {
             this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp;
         }
     }
-

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(jobId);
@@ -1214,7 +1214,7 @@ public class Cron implements ToXContentFragment {

     private static int skipWhiteSpace(int i, String s) {
         for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) {
-            ;
+            // intentionally empty
         }

         return i;

@@ -1222,7 +1222,7 @@ public class Cron implements ToXContentFragment {

     private static int findNextWhiteSpace(int i, String s) {
         for (; i < s.length() && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) {
-            ;
+            // intentionally empty
         }

         return i;
@@ -167,6 +167,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
         new AnalysisLimits(1L, 1L);
     }

+    @Override
     protected AnalysisLimits mutateInstance(AnalysisLimits instance) throws IOException {
         Long memoryModelLimit = instance.getModelMemoryLimit();
         Long categorizationExamplesLimit = instance.getCategorizationExamplesLimit();

@@ -197,5 +198,5 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
             throw new AssertionError("Illegal randomisation branch");
         }
         return new AnalysisLimits(memoryModelLimit, categorizationExamplesLimit);
-    };
+    }
 }
@@ -282,6 +282,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
         return DataDescription.STRICT_PARSER.apply(parser, null).build();
     }

+    @Override
     protected DataDescription mutateInstance(DataDescription instance) throws java.io.IOException {
         DataFormat format = instance.getFormat();
         String timeField = instance.getTimeField();

@@ -320,5 +321,5 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
             throw new AssertionError("Illegal randomisation branch");
         }
         return new DataDescription(format, timeField, timeFormat, delimiter, quoteChar);
-    };
+    }
 }
@@ -239,7 +239,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + " \"network_bytes_out\": { \"type\":\"long\"}"
                 + " }"
                 + " }"
-                + "}");;
+                + "}");
         client().performRequest(createIndexRequest);

         StringBuilder bulk = new StringBuilder();
@@ -78,7 +78,7 @@ public class TransportForecastJobAction extends TransportJobTaskAction<ForecastJ
         ForecastParams params = paramsBuilder.build();
         processManager.forecastJob(task, params, e -> {
             if (e == null) {
-                ; getForecastRequestStats(request.getJobId(), params.getForecastId(), listener);
+                getForecastRequestStats(request.getJobId(), params.getForecastId(), listener);
             } else {
                 listener.onFailure(e);
             }
@@ -11,7 +11,7 @@ import java.util.List;
 import java.util.Objects;

 public abstract class Normalizable implements ToXContentObject {
-    public enum ChildType {BUCKET_INFLUENCER, RECORD};
+    public enum ChildType {BUCKET_INFLUENCER, RECORD}

     private final String indexName;
     private boolean hadBigNormalizedUpdate;
@@ -745,7 +745,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                     }
                 }
                 return ua;
-            };
+            }
         }

         // to avoid creating duplicate functions

@@ -920,7 +920,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 }
                 return p;
             }
-        };
+        }

         //
         // Handle aggs in HAVING. To help folding any aggs not found in Aggregation
@@ -138,7 +138,7 @@ public class AttributeMap<E> {
         public String toString() {
             return set.toString();
         }
-    };
+    }

     private final Map<AttributeWrapper, E> delegate;
     private Set<Attribute> keySet = null;
@@ -17,7 +17,7 @@ import java.util.Locale;
 import java.util.function.Function;

 public class StringProcessor implements Processor {
-

     private interface StringFunction<R> {
         default R apply(Object o) {
             if (!(o instanceof String || o instanceof Character)) {

@@ -57,11 +57,11 @@ public class StringProcessor implements Processor {
             int i = n.intValue();
             if (i < 0) {
                 return null;
-            };
+            }
             char[] spaces = new char[i];
             char whitespace = ' ';
             Arrays.fill(spaces, whitespace);
-

             return new String(spaces);
         }),
         BIT_LENGTH((String s) -> UnicodeUtil.calcUTF16toUTF8Length(s, 0, s.length()) * 8),

@@ -91,7 +91,7 @@ public class StringProcessor implements Processor {
             return this == CHAR ? "character" : super.toString();
         }
     }
-

     public static final String NAME = "s";

     private final StringOperation processor;
@@ -1136,7 +1136,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             return e.foldable() ? Literal.of(e) : e;
         }
     }
-

     static class SimplifyConditional extends OptimizerExpressionRule {

         SimplifyConditional() {

@@ -1355,7 +1355,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
     * Propagate Equals to eliminate conjuncted Ranges.
     * When encountering a different Equals or non-containing {@link Range}, the conjunction becomes false.
     * When encountering a containing {@link Range}, the range gets eliminated by the equality.
     *
     * This rule doesn't perform any promotion of {@link BinaryComparison}s, that is handled by
     * {@link CombineBinaryComparisons} on purpose as the resulting Range might be foldable
     * (which is picked by the folding rule on the next run).

@@ -1420,7 +1420,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                 continue;
             }
             Object eqValue = eq.right().fold();
-

             for (int i = 0; i < ranges.size(); i++) {
                 Range range = ranges.get(i);

@@ -1448,14 +1448,14 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                         return FALSE;
                     }
                 }
-

                 // it's in the range and thus, remove it
                 ranges.remove(i);
                 changed = true;
             }
         }
     }
-

     return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, ranges)) : and;
 }

@@ -1475,7 +1475,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             }
             return e;
         }
-

         // combine conjunction
         private Expression combine(And and) {
             List<Range> ranges = new ArrayList<>();

@@ -1504,7 +1504,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                     exps.add(ex);
                 }
             }
-

             // finally try combining any left BinaryComparisons into possible Ranges
             // this could be a different rule but it's clearer here wrt the order of comparisons

@@ -1513,14 +1513,14 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

             for (int j = i + 1; j < bcs.size(); j++) {
                 BinaryComparison other = bcs.get(j);
-

                 if (main.left().semanticEquals(other.left())) {
                     // >/>= AND </<=
                     if ((main instanceof GreaterThan || main instanceof GreaterThanOrEqual)
                         && (other instanceof LessThan || other instanceof LessThanOrEqual)) {
                         bcs.remove(j);
                         bcs.remove(i);
-

                         ranges.add(new Range(and.source(), main.left(),
                             main.right(), main instanceof GreaterThanOrEqual,
                             other.right(), other instanceof LessThanOrEqual));

@@ -1532,7 +1532,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                         && (main instanceof LessThan || main instanceof LessThanOrEqual)) {
                         bcs.remove(j);
                         bcs.remove(i);
-

                         ranges.add(new Range(and.source(), main.left(),
                             other.right(), other instanceof GreaterThanOrEqual,
                             main.right(), main instanceof LessThanOrEqual));

@@ -1542,8 +1542,8 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                     }
                 }
             }
-
-

             return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, bcs, ranges)) : and;
         }

@@ -1690,13 +1690,13 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

     private boolean findConjunctiveComparisonInRange(BinaryComparison main, List<Range> ranges) {
         Object value = main.right().fold();
-

         // NB: the loop modifies the list (hence why the int is used)
         for (int i = 0; i < ranges.size(); i++) {
             Range other = ranges.get(i);
-

             if (main.left().semanticEquals(other.value())) {
-

                 if (main instanceof GreaterThan || main instanceof GreaterThanOrEqual) {
                     if (other.lower().foldable()) {
                         Integer comp = BinaryComparison.compare(value, other.lower().fold());

@@ -1705,7 +1705,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                         boolean lowerEq = comp == 0 && other.includeLower() && main instanceof GreaterThan;
                         // 2 < a AND (1 < a < 3) -> 2 < a < 3
                         boolean lower = comp > 0 || lowerEq;
-

                         if (lower) {
                             ranges.remove(i);
                             ranges.add(i,

@@ -1745,14 +1745,14 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
         }
         return false;
     }
-

     /**
     * Find commonalities between the given comparison in the given list.
     * The method can be applied both for conjunctive (AND) or disjunctive purposes (OR).
     */
     private static boolean findExistingComparison(BinaryComparison main, List<BinaryComparison> bcs, boolean conjunctive) {
         Object value = main.right().fold();
-

         // NB: the loop modifies the list (hence why the int is used)
         for (int i = 0; i < bcs.size(); i++) {
             BinaryComparison other = bcs.get(i);

@@ -1763,10 +1763,10 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             // if bc is a higher/lower value or gte vs gt, use it instead
             if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual) &&
                 (main instanceof GreaterThan || main instanceof GreaterThanOrEqual)) {
-

                 if (main.left().semanticEquals(other.left())) {
                     Integer compare = BinaryComparison.compare(value, other.right().fold());
-

                     if (compare != null) {
                         // AND
                         if ((conjunctive &&

@@ -1794,10 +1794,10 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             // if bc is a lower/higher value or lte vs lt, use it instead
             else if ((other instanceof LessThan || other instanceof LessThanOrEqual) &&
                 (main instanceof LessThan || main instanceof LessThanOrEqual)) {
-

                 if (main.left().semanticEquals(other.left())) {
                     Integer compare = BinaryComparison.compare(value, other.right().fold());
-

                     if (compare != null) {
                         // AND
                         if ((conjunctive &&

@@ -1814,7 +1814,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                             (compare == 0 && main instanceof LessThanOrEqual && other instanceof LessThan)))) {
                             bcs.remove(i);
                             bcs.add(i, main);
-

                         }
                         // found a match
                         return true;

@@ -1824,7 +1824,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                 }
             }
         }
-

         return false;
     }
 }

@@ -1951,5 +1951,5 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

     enum TransformDirection {
         UP, DOWN
-    };
+    }
 }
@@ -132,7 +132,7 @@ public class SqlParser {
                     log.info(format(Locale.ROOT, " %-15s '%s'",
                         symbolicName == null ? literalName : symbolicName,
                         t.getText()));
-                };
+                }
             }

             ParserRuleContext tree = parseFunction.apply(parser);

@@ -145,7 +145,7 @@ public class SqlParser {
     }

     private static void debug(SqlBaseParser parser) {
-

         // when debugging, use the exact prediction mode (needed for diagnostics as well)
         parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);

@@ -35,7 +35,7 @@ public final class Cursors {

     private static final NamedWriteableRegistry WRITEABLE_REGISTRY = new NamedWriteableRegistry(getNamedWriteables());

-    private Cursors() {};
+    private Cursors() {}

     /**
     * The {@link NamedWriteable}s required to deserialize {@link Cursor}s.
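Taken together, the fixes above settle on two conventions: delete the semicolon when the statement is accidental, or keep the empty block and say so in a comment. A minimal hypothetical sketch of both (class and method names are illustrative only, not from the commit):

public class IntentionallyEmptyDemo {
    // Before: public IntentionallyEmptyDemo() {};  -- trailing empty statement
    public IntentionallyEmptyDemo() {}

    // A loop whose header does all the work keeps an empty body, but marks it,
    // mirroring the Cron.skipWhiteSpace() and DataCounts parser changes above.
    static int skipSpaces(int i, String s) {
        for (; i < s.length() && s.charAt(i) == ' '; i++) {
            // intentionally empty
        }
        return i;
    }
}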