Add OneStatementPerLineCheck to Checkstyle rules (#33682)

This change adds the OneStatementPerLineCheck to our checkstyle precommit
checks. This rule restricts the number of statements per line to one. The
reasoning behind this is that it is very difficult to read multiple statements on
one line. People seem to mostly use it in short lambdas and switch statements in
our code base, but just going through the changes already uncovered some actual
problems in randomization in test code, so I think it's worth it.
This commit is contained in:
Christoph Büscher 2018-09-21 11:52:31 +02:00 committed by GitHub
parent 3a5b8a71b4
commit b654d986d7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 247 additions and 103 deletions

View File

@ -35,6 +35,8 @@
<module name="OuterTypeFilename" />
<!-- No line wraps inside of import and package statements. -->
<module name="NoLineWrap" />
<!-- only one statement per line should be allowed -->
<module name="OneStatementPerLine"/>
<!-- Each java file has only one outer class -->
<module name="OneTopLevelClass" />
<!-- The suffix L is preferred, because the letter l (ell) is often

View File

@ -347,17 +347,39 @@ public final class MethodWriter extends GeneratorAdapter {
}
switch (operation) {
case MUL: math(GeneratorAdapter.MUL, getType(clazz)); break;
case DIV: math(GeneratorAdapter.DIV, getType(clazz)); break;
case REM: math(GeneratorAdapter.REM, getType(clazz)); break;
case ADD: math(GeneratorAdapter.ADD, getType(clazz)); break;
case SUB: math(GeneratorAdapter.SUB, getType(clazz)); break;
case LSH: math(GeneratorAdapter.SHL, getType(clazz)); break;
case USH: math(GeneratorAdapter.USHR, getType(clazz)); break;
case RSH: math(GeneratorAdapter.SHR, getType(clazz)); break;
case BWAND: math(GeneratorAdapter.AND, getType(clazz)); break;
case XOR: math(GeneratorAdapter.XOR, getType(clazz)); break;
case BWOR: math(GeneratorAdapter.OR, getType(clazz)); break;
case MUL:
math(GeneratorAdapter.MUL, getType(clazz));
break;
case DIV:
math(GeneratorAdapter.DIV, getType(clazz));
break;
case REM:
math(GeneratorAdapter.REM, getType(clazz));
break;
case ADD:
math(GeneratorAdapter.ADD, getType(clazz));
break;
case SUB:
math(GeneratorAdapter.SUB, getType(clazz));
break;
case LSH:
math(GeneratorAdapter.SHL, getType(clazz));
break;
case USH:
math(GeneratorAdapter.USHR, getType(clazz));
break;
case RSH:
math(GeneratorAdapter.SHR, getType(clazz));
break;
case BWAND:
math(GeneratorAdapter.AND, getType(clazz));
break;
case XOR:
math(GeneratorAdapter.XOR, getType(clazz));
break;
case BWOR:
math(GeneratorAdapter.OR, getType(clazz));
break;
default:
throw location.createError(new IllegalStateException("Illegal tree structure."));
}

View File

@ -417,18 +417,33 @@ public final class SSource extends AStatement {
for (AStatement statement : statements) {
statement.write(writer, globals);
}
if (!methodEscape) {
switch (scriptClassInfo.getExecuteMethod().getReturnType().getSort()) {
case org.objectweb.asm.Type.VOID: break;
case org.objectweb.asm.Type.BOOLEAN: writer.push(false); break;
case org.objectweb.asm.Type.BYTE: writer.push(0); break;
case org.objectweb.asm.Type.SHORT: writer.push(0); break;
case org.objectweb.asm.Type.INT: writer.push(0); break;
case org.objectweb.asm.Type.LONG: writer.push(0L); break;
case org.objectweb.asm.Type.FLOAT: writer.push(0f); break;
case org.objectweb.asm.Type.DOUBLE: writer.push(0d); break;
default: writer.visitInsn(Opcodes.ACONST_NULL);
case org.objectweb.asm.Type.VOID:
break;
case org.objectweb.asm.Type.BOOLEAN:
writer.push(false);
break;
case org.objectweb.asm.Type.BYTE:
writer.push(0);
break;
case org.objectweb.asm.Type.SHORT:
writer.push(0);
break;
case org.objectweb.asm.Type.INT:
writer.push(0);
break;
case org.objectweb.asm.Type.LONG:
writer.push(0L);
break;
case org.objectweb.asm.Type.FLOAT:
writer.push(0f);
break;
case org.objectweb.asm.Type.DOUBLE:
writer.push(0d);
break;
default:
writer.visitInsn(Opcodes.ACONST_NULL);
}
writer.returnValue();
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
@ -38,7 +39,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.spans.SpanFirstQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
@ -489,43 +489,51 @@ final class QueryAnalyzer {
return subResult;
}
}
int msm = 0;
boolean verified = true;
boolean matchAllDocs = true;
boolean hasDuplicateTerms = false;Set<QueryExtraction> extractions = new HashSet<>();
Set<String> seenRangeFields = new HashSet<>();
for (Result result : conjunctions) {
// In case that there are duplicate query extractions we need to be careful with incrementing msm,
// because that could lead to valid matches not becoming candidate matches:
// query: (field:val1 AND field:val2) AND (field:val2 AND field:val3)
// doc: field: val1 val2 val3
// So lets be protective and decrease the msm:
int resultMsm = result.minimumShouldMatch;
for (QueryExtraction queryExtraction : result.extractions) {
if (queryExtraction.range != null) {
// In case of range queries each extraction does not simply increment the minimum_should_match
// for that percolator query like for a term based extraction, so that can lead to more false
// positives for percolator queries with range queries than term based queries.
// The is because the way number fields are extracted from the document to be percolated.
// Per field a single range is extracted and if a percolator query has two or more range queries
// on the same field, then the minimum should match can be higher than clauses in the CoveringQuery.
// Therefore right now the minimum should match is incremented once per number field when processing
// the percolator query at index time.
if (seenRangeFields.add(queryExtraction.range.fieldName)) {
resultMsm = 1;
} else {
resultMsm = 0;
}
}
int msm = 0;
boolean verified = true;
boolean matchAllDocs = true;
boolean hasDuplicateTerms = false;
Set<QueryExtraction> extractions = new HashSet<>();
Set<String> seenRangeFields = new HashSet<>();
for (Result result : conjunctions) {
// In case that there are duplicate query extractions we need to be careful with
// incrementing msm,
// because that could lead to valid matches not becoming candidate matches:
// query: (field:val1 AND field:val2) AND (field:val2 AND field:val3)
// doc: field: val1 val2 val3
// So lets be protective and decrease the msm:
int resultMsm = result.minimumShouldMatch;
for (QueryExtraction queryExtraction : result.extractions) {
if (queryExtraction.range != null) {
// In case of range queries each extraction does not simply increment the
// minimum_should_match
// for that percolator query like for a term based extraction, so that can lead
// to more false
// positives for percolator queries with range queries than term based queries.
// The is because the way number fields are extracted from the document to be
// percolated.
// Per field a single range is extracted and if a percolator query has two or
// more range queries
// on the same field, then the minimum should match can be higher than clauses
// in the CoveringQuery.
// Therefore right now the minimum should match is incremented once per number
// field when processing
// the percolator query at index time.
if (seenRangeFields.add(queryExtraction.range.fieldName)) {
resultMsm = 1;
} else {
resultMsm = 0;
}
}
if (extractions.contains(queryExtraction)) {
if (extractions.contains(queryExtraction)) {
resultMsm = 0;
verified = false;
break;
}
}
msm += resultMsm;
resultMsm = 0;
verified = false;
break;
}
}
msm += resultMsm;
if (result.verified == false
// If some inner extractions are optional, the result can't be verified

View File

@ -295,7 +295,10 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
StringBuilder sb = new StringBuilder();
sb.append(textMinusMarkup);
sb.append("\n");
annotations.forEach(a -> {sb.append(a); sb.append("\n");});
annotations.forEach(a -> {
sb.append(a);
sb.append("\n");
});
return sb.toString();
}

View File

@ -410,9 +410,14 @@ public abstract class Rounding implements Writeable {
Rounding rounding;
byte id = in.readByte();
switch (id) {
case TimeUnitRounding.ID: rounding = new TimeUnitRounding(in); break;
case TimeIntervalRounding.ID: rounding = new TimeIntervalRounding(in); break;
default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
case TimeUnitRounding.ID:
rounding = new TimeUnitRounding(in);
break;
case TimeIntervalRounding.ID:
rounding = new TimeIntervalRounding(in);
break;
default:
throw new ElasticsearchException("unknown rounding id [" + id + "]");
}
return rounding;
}

View File

@ -882,7 +882,8 @@ final class DocumentParser {
builder = new ObjectMapper.Builder(paths[i]).enabled(true);
}
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(),
context.path()); mapper = (ObjectMapper) builder.build(builderContext);
context.path());
mapper = (ObjectMapper) builder.build(builderContext);
if (mapper.nested() != ObjectMapper.Nested.NO) {
throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i])
+ "]) through `copy_to` or dots in field names");

View File

@ -502,10 +502,18 @@ public class InternalOrder extends BucketOrder {
// convert the new order IDs to the old histogram order IDs.
byte id;
switch (order.id()) {
case COUNT_DESC_ID: id = 4; break;
case COUNT_ASC_ID: id = 3; break;
case KEY_DESC_ID: id = 2; break;
case KEY_ASC_ID: id = 1; break;
case COUNT_DESC_ID:
id = 4;
break;
case COUNT_ASC_ID:
id = 3;
break;
case KEY_DESC_ID:
id = 2;
break;
case KEY_ASC_ID:
id = 1;
break;
default: throw new RuntimeException("unknown order id [" + order.id() + "]");
}
out.writeByte(id);

View File

@ -232,7 +232,10 @@ public class QueryPhase implements SearchPhase {
final Runnable checkCancelled;
if (timeoutRunnable != null && cancellationRunnable != null) {
checkCancelled = () -> { timeoutRunnable.run(); cancellationRunnable.run(); };
checkCancelled = () -> {
timeoutRunnable.run();
cancellationRunnable.run();
};
} else if (timeoutRunnable != null) {
checkCancelled = timeoutRunnable;
} else if (cancellationRunnable != null) {

View File

@ -152,7 +152,10 @@ public class ScopedSettingsTests extends ESTestCase {
}
try {
service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {consumer.set(a); consumer2.set(b);});
service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {
consumer.set(a);
consumer2.set(b);
});
fail("setting not registered");
} catch (IllegalArgumentException ex) {
assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage());
@ -467,7 +470,10 @@ public class ScopedSettingsTests extends ESTestCase {
AtomicInteger aC = new AtomicInteger();
AtomicInteger bC = new AtomicInteger();
service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {aC.set(a); bC.set(b);});
service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {
aC.set(a);
bC.set(b);
});
assertEquals(0, consumer.get());
assertEquals(0, consumer2.get());

View File

@ -142,8 +142,14 @@ public class AsyncIOProcessorTests extends ESTestCase {
received.addAndGet(candidates.size());
}
};
processor.put(new Object(), (e) -> {notified.incrementAndGet();throw new RuntimeException();});
processor.put(new Object(), (e) -> {notified.incrementAndGet();throw new RuntimeException();});
processor.put(new Object(), (e) -> {
notified.incrementAndGet();
throw new RuntimeException();
});
processor.put(new Object(), (e) -> {
notified.incrementAndGet();
throw new RuntimeException();
});
assertEquals(2, notified.get());
assertEquals(2, received.get());
}

View File

@ -208,10 +208,18 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
assertEquals(status.hashCode(), sameStatus.hashCode());
switch (randomInt(3)) {
case 0: task.setPhase("otherPhase"); break;
case 1: task.setResyncedOperations(task.getResyncedOperations() + 1); break;
case 2: task.setSkippedOperations(task.getSkippedOperations() + 1); break;
case 3: task.setTotalOperations(task.getTotalOperations() + 1); break;
case 0:
task.setPhase("otherPhase");
break;
case 1:
task.setResyncedOperations(task.getResyncedOperations() + 1);
break;
case 2:
task.setSkippedOperations(task.getSkippedOperations() + 1);
break;
case 3:
task.setTotalOperations(task.getTotalOperations() + 1);
break;
}
PrimaryReplicaSyncer.ResyncTask.Status differentStatus = task.getStatus();

View File

@ -42,7 +42,10 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase {
public void testEmptySettingsAreOkay() throws InterruptedException {
AtomicBoolean scheduled = new AtomicBoolean();
execute(Settings.EMPTY,
(command, interval, name) -> { scheduled.set(true); return new MockCancellable(); },
(command, interval, name) -> {
scheduled.set(true);
return new MockCancellable();
},
() -> assertTrue(scheduled.get()));
}
@ -50,7 +53,10 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase {
Settings settings = Settings.builder().put("monitor.jvm.gc.enabled", "false").build();
AtomicBoolean scheduled = new AtomicBoolean();
execute(settings,
(command, interval, name) -> { scheduled.set(true); return new MockCancellable(); },
(command, interval, name) -> {
scheduled.set(true);
return new MockCancellable();
},
() -> assertFalse(scheduled.get()));
}

View File

@ -66,7 +66,8 @@ public class BooleanTermsIT extends ESIntegTestCase {
multiValue = new boolean[] {true};
break;
case 3:
numMultiFalses++; numMultiTrues++;
numMultiFalses++;
numMultiTrues++;
multiValue = new boolean[] {false, true};
break;
default:

View File

@ -710,9 +710,11 @@ public class HighlightBuilderTests extends ESTestCase {
switch (randomIntBetween(0, 2)) {
// change settings that only exists on top level
case 0:
mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); break;
mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder());
break;
case 1:
mutation.encoder(original.encoder() + randomAlphaOfLength(2)); break;
mutation.encoder(original.encoder() + randomAlphaOfLength(2));
break;
case 2:
if (randomBoolean()) {
// add another field

View File

@ -381,7 +381,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
out.writeMap(
fetchExceptions,
StreamOutput::writeVLong,
(stream, value) -> { stream.writeVInt(value.v1()); stream.writeException(value.v2()); });
(stream, value) -> {
stream.writeVInt(value.v1());
stream.writeException(value.v2());
});
out.writeZLong(timeSinceLastFetchMillis);
}
@ -526,6 +529,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status {
return status.fetchExceptions().values().stream().map(t -> t.v2().getMessage()).collect(Collectors.toList());
}
@Override
public String toString() {
return Strings.toString(this);
}

View File

@ -1352,14 +1352,28 @@ public class Cron implements ToXContentFragment {
int max = -1;
if (stopAt < startAt) {
switch (type) {
case SECOND : max = 60; break;
case MINUTE : max = 60; break;
case HOUR : max = 24; break;
case MONTH : max = 12; break;
case DAY_OF_WEEK : max = 7; break;
case DAY_OF_MONTH : max = 31; break;
case YEAR : throw new IllegalArgumentException("Start year must be less than stop year");
default : throw new IllegalArgumentException("Unexpected type encountered");
case SECOND:
max = 60;
break;
case MINUTE:
max = 60;
break;
case HOUR:
max = 24;
break;
case MONTH:
max = 12;
break;
case DAY_OF_WEEK:
max = 7;
break;
case DAY_OF_MONTH:
max = 31;
break;
case YEAR:
throw new IllegalArgumentException("Start year must be less than stop year");
default:
throw new IllegalArgumentException("Unexpected type encountered");
}
stopAt += max;
}

View File

@ -24,6 +24,7 @@ public class ChainTaskExecutorTests extends ESTestCase {
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
private final CountDownLatch latch = new CountDownLatch(1);
@Override
@After
public void tearDown() throws Exception {
try {
@ -37,8 +38,14 @@ public class ChainTaskExecutorTests extends ESTestCase {
final List<String> strings = new ArrayList<>();
ActionListener<Void> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail());
ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false);
chainTaskExecutor.add(listener -> { strings.add("first"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> { strings.add("second"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> {
strings.add("first");
listener.onResponse(null);
});
chainTaskExecutor.add(listener -> {
strings.add("second");
listener.onResponse(null);
});
chainTaskExecutor.execute(finalListener);
@ -52,9 +59,17 @@ public class ChainTaskExecutorTests extends ESTestCase {
ActionListener<Void> finalListener = createBlockingListener(() -> fail(),
e -> assertThat(e.getMessage(), equalTo("some error")));
ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true);
chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); });
chainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> {
strings.add("before");
listener.onResponse(null);
});
chainTaskExecutor.add(listener -> {
throw new RuntimeException("some error");
});
chainTaskExecutor.add(listener -> {
strings.add("after");
listener.onResponse(null);
});
chainTaskExecutor.execute(finalListener);
@ -68,9 +83,16 @@ public class ChainTaskExecutorTests extends ESTestCase {
ActionListener<Void> finalListener = createBlockingListener(() -> fail(),
e -> assertThat(e.getMessage(), equalTo("some error 1")));
ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true);
chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> { throw new RuntimeException("some error 1"); });
chainTaskExecutor.add(listener -> { throw new RuntimeException("some error 2"); });
chainTaskExecutor.add(listener -> {
strings.add("before");
listener.onResponse(null);
});
chainTaskExecutor.add(listener -> {
throw new RuntimeException("some error 1");
});
chainTaskExecutor.add(listener -> {
throw new RuntimeException("some error 2");
});
chainTaskExecutor.execute(finalListener);
@ -83,9 +105,17 @@ public class ChainTaskExecutorTests extends ESTestCase {
final List<String> strings = new ArrayList<>();
ActionListener<Void> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail());
ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false);
chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); });
chainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); });
chainTaskExecutor.add(listener -> {
strings.add("before");
listener.onResponse(null);
});
chainTaskExecutor.add(listener -> {
throw new RuntimeException("some error");
});
chainTaskExecutor.add(listener -> {
strings.add("after");
listener.onResponse(null);
});
chainTaskExecutor.execute(finalListener);

View File

@ -55,8 +55,6 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import javax.crypto.SecretKey;
import java.io.IOException;
import java.time.Clock;
import java.time.Instant;
@ -67,6 +65,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import javax.crypto.SecretKey;
import static java.time.Clock.systemUTC;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.hamcrest.Matchers.containsString;
@ -253,7 +253,7 @@ public class TokenServiceTests extends ESTestCase {
public void testKeyExchange() throws Exception {
TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
int numRotations = 0;randomIntBetween(1, 5);
int numRotations = randomIntBetween(1, 5);
for (int i = 0; i < numRotations; i++) {
rotateKeys(tokenService);
}