Clean codebase from empty statements (#37822)
* Remove empty statements

There are a number of undocumented empty statements across the code base. While they are mostly harmless, they make the code harder to read and are potentially error-prone. This removes most of these instances and marks blocks that are empty by intention as such.

* Change test: slightly more verbose, but less confusing
parent 49073dd2f6
commit b4b4cd6ebd
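Most of the removed statements below are mechanical fixes: a doubled ";;", a "};" closing a method, enum, or interface, or a bare ";" on its own line. The commit message's "potentially error-prone" is worth spelling out: when a stray semicolon follows an "if" or a loop header, the empty statement becomes the entire body and silently changes control flow. A minimal, hypothetical sketch of that trap (not code from this commit):

    public class EmptyStatementDemo {
        public static void main(String[] args) {
            boolean ready = false;
            if (ready);  // stray ';' -- the 'if' governs only this empty statement
            {
                // This block is detached from the 'if' and always runs.
                System.out.println("runs even though ready == false");
            }
        }
    }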
@@ -37,7 +37,7 @@ public class MultiTermVectorsRequest implements ToXContentObject, Validatable {
      * Constructs an empty MultiTermVectorsRequest
      * After that use {@code add} method to add individual {@code TermVectorsRequest} to it.
      */
-    public MultiTermVectorsRequest() {};
+    public MultiTermVectorsRequest() {}
 
     /**
      * Constructs a MultiTermVectorsRequest from the given document ids
@@ -38,7 +38,7 @@ public class DetectionRule implements ToXContentObject {
     public static final ParseField CONDITIONS_FIELD = new ParseField("conditions");
 
     public static final ObjectParser<Builder, Void> PARSER =
-            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new);;
+            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new);
 
     static {
         PARSER.declareStringArray(Builder::setActions, ACTIONS_FIELD);
@@ -297,7 +297,7 @@ public class ObjectParserTests extends ESTestCase {
 
     enum TestEnum {
         FOO, BAR
-    };
+    }
 
     public void testParseEnumFromString() throws IOException {
         class TestStruct {
@@ -69,7 +69,7 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory {
                     + KeepTypesMode.EXCLUDE + "] but was [" + modeString + "].");
             }
         }
-    };
+    }
 
     KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
@@ -37,7 +37,7 @@ public interface MetricDetail extends ToXContentObject, NamedWriteable {
         innerToXContent(builder, params);
         builder.endObject();
         return builder.endObject();
-    };
+    }
 
     default String getMetricName() {
         return getWriteableName();
@@ -60,7 +60,7 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
             public void execute() {
                 scriptBody.accept(getCtx());
             }
-        };;
+        };
        when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory);
        AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(mockScript("")));
        RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc);
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.settings;
 
 import org.apache.logging.log4j.Level;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchParseException;
@@ -44,6 +43,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.internal.io.IOUtils;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -60,12 +60,12 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.ListIterator;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 import java.util.function.Predicate;
@@ -20,6 +20,7 @@
 package org.elasticsearch.common.util;
 
 import com.carrotsearch.hppc.ObjectArrayList;
+
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefArray;
 import org.apache.lucene.util.BytesRefBuilder;
@@ -301,8 +302,8 @@ public class CollectionUtils {
         public int size() {
             return in.size();
         }
-    };
+    }
 
     public static void sort(final BytesRefArray bytes, final int[] indices) {
         sort(new BytesRefBuilder(), new BytesRefBuilder(), bytes, indices);
     }
@@ -36,7 +36,7 @@ public class PreBuiltCacheFactory {
      * ELASTICSEARCH Exactly one version per elasticsearch version is stored. Useful if you change an analyzer between elasticsearch
      * releases, when the lucene version does not change
      */
-    public enum CachingStrategy { ONE, LUCENE, ELASTICSEARCH };
+    public enum CachingStrategy { ONE, LUCENE, ELASTICSEARCH }
 
     public interface PreBuiltCache<T> {
 
@@ -132,7 +132,7 @@ public final class ScoreScriptUtils {
             this.originLat = origin.lat();
             this.originLon = origin.lon();
             this.offset = DistanceUnit.DEFAULT.parse(offsetStr, DistanceUnit.DEFAULT);
-            this.scaling = 0.5 * Math.pow(scale, 2.0) / Math.log(decay);;
+            this.scaling = 0.5 * Math.pow(scale, 2.0) / Math.log(decay);
         }
 
         public double decayGeoGauss(GeoPoint docValue) {
@@ -78,7 +78,7 @@ public class EarlyTerminatingCollector extends FilterCollector {
                     }
                 }
                 super.collect(doc);
-            };
+            }
         };
     }
 
@@ -93,7 +93,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
         this.setNestedPath(template.getNestedPath());
         if (template.getNestedSort() != null) {
             this.setNestedSort(template.getNestedSort());
-        };
+        }
     }
 
     /**
@@ -76,7 +76,7 @@ public class RestoreSnapshotRequestTests extends AbstractWireSerializingTestCase
         int count = randomInt(3) + 1;
 
         for (int i = 0; i < count; ++i) {
-            indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));;
+            indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));
         }
         instance.indexSettings(indexSettings);
     }
@@ -83,5 +83,5 @@ public class GroupShardsIteratorTests extends ESTestCase {
             shardRouting = ShardRoutingHelper.moveToStarted(shardRouting);
         }
         return shardRouting;
-    };
+    }
 }
@@ -34,8 +34,10 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
@@ -260,8 +262,10 @@ public class TaskBatcherTests extends TaskExecutorTests {
         Map<Integer, TestListener> tasks = new HashMap<>();
         final int numOfTasks = randomInt(10);
         final CountDownLatch latch = new CountDownLatch(numOfTasks);
+        Set<Integer> usedKeys = new HashSet<>(numOfTasks);
         for (int i = 0; i < numOfTasks; i++) {
-            while (null != tasks.put(randomInt(1024), new TestListener() {
+            int key = randomValueOtherThanMany(k -> usedKeys.contains(k), () -> randomInt(1024));
+            tasks.put(key, new TestListener() {
                 @Override
                 public void processed(String source) {
                     latch.countDown();
@@ -271,8 +275,10 @@ public class TaskBatcherTests extends TaskExecutorTests {
                 public void onFailure(String source, Exception e) {
                     fail(ExceptionsHelper.detailedMessage(e));
                 }
-            })) ;
+            });
+            usedKeys.add(key);
         }
+        assert usedKeys.size() == numOfTasks;
 
         TestExecutor<Integer> executor = taskList -> {
             assertThat(taskList.size(), equalTo(tasks.size()));
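The change above is the test tweak named in the commit message ("slightly more verbose but less confusing"): the old line relied on a while loop whose entire body was an empty statement, calling tasks.put(...) purely for its side effect until it returned null, i.e. until the random key was previously unused. The new version picks a fresh key first and inserts exactly once. A simplified sketch of the two idioms (listener stands in for the anonymous TestListener):

    // Before: the trailing ';' is the whole loop body; put() retries until the key is fresh.
    while (null != tasks.put(randomInt(1024), listener)) ;

    // After: choose an unused key explicitly, then insert once.
    int key = randomValueOtherThanMany(k -> usedKeys.contains(k), () -> randomInt(1024));
    tasks.put(key, listener);
    usedKeys.add(key);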
@@ -45,7 +45,7 @@ public class GeoDistanceTests extends ESTestCase {
         GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.ARC);
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             geoDistance.writeTo(out);
-            try (StreamInput in = out.bytes().streamInput()) {;
+            try (StreamInput in = out.bytes().streamInput()) {
                 GeoDistance copy = GeoDistance.readFromStream(in);
                 assertEquals(copy.toString() + " vs. " + geoDistance.toString(), copy, geoDistance);
             }
@@ -19,9 +19,8 @@
 
 package org.elasticsearch.common.geo.builders;
 
-import org.locationtech.jts.geom.Coordinate;
-
 import org.elasticsearch.common.unit.DistanceUnit;
+import org.locationtech.jts.geom.Coordinate;
 
 import java.io.IOException;
 
@@ -59,7 +58,7 @@ public class CircleBuilderTests extends AbstractShapeBuilderTestCase<CircleBuild
             DistanceUnit newRandom = unit;
             while (newRandom == unit) {
                 newRandom = randomFrom(DistanceUnit.values());
-            };
+            }
             unit = newRandom;
         }
         return mutation.radius(radius, unit);
@@ -570,7 +570,7 @@ public class BytesStreamsTests extends ESTestCase {
     }
 
     public void testReadWriteGeoPoint() throws IOException {
-        try (BytesStreamOutput out = new BytesStreamOutput()) {;
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
             out.writeGenericValue(geoPoint);
             StreamInput wrap = out.bytes().streamInput();
@@ -67,7 +67,7 @@ public class AsyncIOProcessorTests extends ESTestCase {
                     } catch (Exception ex) {
                         throw new RuntimeException(ex);
                     }
-                };
+                }
             };
             thread[i].start();
         }
@@ -120,7 +120,7 @@ public class AsyncIOProcessorTests extends ESTestCase {
                     } catch (Exception ex) {
                         throw new RuntimeException(ex);
                     }
-                };
+                }
             };
             thread[i].start();
         }
@@ -392,7 +392,7 @@ public class LiveVersionMapTests extends ESTestCase {
     public void testPruneTombstonesWhileLocked() throws InterruptedException, IOException {
         LiveVersionMap map = new LiveVersionMap();
         BytesRef uid = uid("1");
-        ;
+
         try (Releasable ignore = map.acquireLock(uid)) {
             map.putDeleteUnderLock(uid, new DeleteVersionValue(0, 0, 0, 0));
             map.beforeRefresh(); // refresh otherwise we won't prune since it's tracked by the current map
@@ -51,8 +51,8 @@ import java.util.List;
 import java.util.Map;
 
 import static java.util.Collections.singletonList;
-import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static java.util.Collections.singletonMap;
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -81,7 +81,7 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
             });
         }
 
-    };
+    }
 
     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
@@ -861,6 +861,6 @@ public class FunctionScoreTests extends ESTestCase {
         @Override
         protected int doHashCode() {
             return 0;
-        };
+        }
     }
 }
@@ -56,10 +56,10 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
             newIndices[i] = randomSimpleString(random(), 1, 30);
         }
         request.indices(newIndices);
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.indices()[i]);
         }
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.getSearchRequest().indices()[i]);
         }
     }
@@ -49,10 +49,10 @@ public class UpdateByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
             newIndices[i] = randomSimpleString(random(), 1, 30);
         }
         request.indices(newIndices);
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.indices()[i]);
         }
-        for (int i = 0; i < numNewIndices; i++) {;
+        for (int i = 0; i < numNewIndices; i++) {
             assertEquals(newIndices[i], request.getSearchRequest().indices()[i]);
         }
     }
@@ -28,9 +28,6 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.metrics.Percentile;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket;
-import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
 import java.io.IOException;
@@ -192,7 +189,6 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
         String name = instance.getName();
         double[] percents = extractPercents(instance);
         double[] percentiles = extractPercentiles(instance);
-        ;
         DocValueFormat formatter = instance.formatter();
         List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
         Map<String, Object> metaData = instance.getMetaData();
@@ -152,7 +152,7 @@ public class NestedSortBuilderTests extends ESTestCase {
             @Override
             protected QueryBuilder doRewrite(org.elasticsearch.index.query.QueryRewriteContext queryShardContext) throws IOException {
                 return new MatchAllQueryBuilder();
-            };
+            }
         };
         // test that filter gets rewritten
         NestedSortBuilder original = new NestedSortBuilder("path").setFilter(filterThatRewrites);
@@ -40,7 +40,7 @@ public class EqualsHashCodeTestUtils {
      */
     public interface CopyFunction<T> {
         T copy(T t) throws IOException;
-    };
+    }
 
     /**
      * A function that creates a copy of its input argument that is different from its
@@ -48,7 +48,7 @@ public class EqualsHashCodeTestUtils {
      */
     public interface MutateFunction<T> {
         T mutate(T t) throws IOException;
-    };
+    }
 
     /**
      * Perform common equality and hashCode checks on the input object
@@ -324,7 +324,7 @@ public final class FrozenEngine extends ReadOnlyEngine {
                 @Override
                 public LeafReader wrap(LeafReader reader) {
                     return new LazyLeafReader(reader);
-                };
+                }
             });
             this.delegate = reader;
             this.engine = engine;
@@ -99,7 +99,7 @@ public class DataCounts implements ToXContentObject, Writeable {
             p -> TimeUtils.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), LATEST_EMPTY_BUCKET_TIME, ValueType.VALUE);
         PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
             p -> TimeUtils.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), LATEST_SPARSE_BUCKET_TIME, ValueType.VALUE);
-        PARSER.declareLong((t, u) -> {;}, INPUT_RECORD_COUNT);
+        PARSER.declareLong((t, u) -> {/* intentionally empty */}, INPUT_RECORD_COUNT);
     }
 
     public static String documentId(String jobId) {
@@ -1214,7 +1214,7 @@ public class Cron implements ToXContentFragment {
 
     private static int skipWhiteSpace(int i, String s) {
         for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) {
-            ;
+            // intentionally empty
         }
 
         return i;
@@ -1222,7 +1222,7 @@ public class Cron implements ToXContentFragment {
 
     private static int findNextWhiteSpace(int i, String s) {
         for (; i < s.length() && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) {
-            ;
+            // intentionally empty
         }
 
         return i;
@@ -167,6 +167,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
         new AnalysisLimits(1L, 1L);
     }
 
+    @Override
     protected AnalysisLimits mutateInstance(AnalysisLimits instance) throws IOException {
         Long memoryModelLimit = instance.getModelMemoryLimit();
         Long categorizationExamplesLimit = instance.getCategorizationExamplesLimit();
@@ -197,5 +198,5 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
             throw new AssertionError("Illegal randomisation branch");
         }
         return new AnalysisLimits(memoryModelLimit, categorizationExamplesLimit);
-    };
+    }
 }
@@ -282,6 +282,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
         return DataDescription.STRICT_PARSER.apply(parser, null).build();
     }
 
+    @Override
     protected DataDescription mutateInstance(DataDescription instance) throws java.io.IOException {
         DataFormat format = instance.getFormat();
         String timeField = instance.getTimeField();
@@ -320,5 +321,5 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
             throw new AssertionError("Illegal randomisation branch");
         }
         return new DataDescription(format, timeField, timeFormat, delimiter, quoteChar);
-    };
+    }
 }
@@ -239,7 +239,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      \"network_bytes_out\": { \"type\":\"long\"}"
                 + "    }"
                 + "  }"
-                + "}");;
+                + "}");
         client().performRequest(createIndexRequest);
 
         StringBuilder bulk = new StringBuilder();
@@ -78,7 +78,7 @@ public class TransportForecastJobAction extends TransportJobTaskAction<ForecastJ
         ForecastParams params = paramsBuilder.build();
         processManager.forecastJob(task, params, e -> {
             if (e == null) {
-                ; getForecastRequestStats(request.getJobId(), params.getForecastId(), listener);
+                getForecastRequestStats(request.getJobId(), params.getForecastId(), listener);
             } else {
                 listener.onFailure(e);
             }
@@ -11,7 +11,7 @@ import java.util.List;
 import java.util.Objects;
 
 public abstract class Normalizable implements ToXContentObject {
-    public enum ChildType {BUCKET_INFLUENCER, RECORD};
+    public enum ChildType {BUCKET_INFLUENCER, RECORD}
 
     private final String indexName;
     private boolean hadBigNormalizedUpdate;
@@ -745,7 +745,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 }
             }
             return ua;
-        };
+        }
     }
 
     // to avoid creating duplicate functions
@@ -920,7 +920,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             }
             return p;
         }
-    };
+    }
 
     //
     // Handle aggs in HAVING. To help folding any aggs not found in Aggregation
@@ -138,7 +138,7 @@ public class AttributeMap<E> {
         public String toString() {
             return set.toString();
         }
-    };
+    }
 
     private final Map<AttributeWrapper, E> delegate;
     private Set<Attribute> keySet = null;
@@ -57,7 +57,7 @@ public class StringProcessor implements Processor {
             int i = n.intValue();
             if (i < 0) {
                 return null;
-            };
+            }
             char[] spaces = new char[i];
             char whitespace = ' ';
             Arrays.fill(spaces, whitespace);
@@ -1951,5 +1951,5 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
 
     enum TransformDirection {
         UP, DOWN
-    };
+    }
 }
@@ -132,7 +132,7 @@ public class SqlParser {
                 log.info(format(Locale.ROOT, " %-15s '%s'",
                     symbolicName == null ? literalName : symbolicName,
                     t.getText()));
-            };
+            }
         }
 
         ParserRuleContext tree = parseFunction.apply(parser);
@@ -35,7 +35,7 @@ public final class Cursors {
 
     private static final NamedWriteableRegistry WRITEABLE_REGISTRY = new NamedWriteableRegistry(getNamedWriteables());
 
-    private Cursors() {};
+    private Cursors() {}
 
     /**
      * The {@link NamedWriteable}s required to deserialize {@link Cursor}s.