Upgrade checkstyle to version 7.5
This commit upgrades the checkstyle configuration from version 5.9 to version 7.5, the latest version as of today. The main enhancement obtained via this upgrade is better detection of redundant modifiers.

Relates #22960
parent ea4eb06b0a
commit 9a0b216c36
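For illustration, the snippet below sketches the kinds of modifiers that a redundant-modifier check reports and that the diff below removes throughout the code base. The type names are made up for this example, and the project's actual checkstyle rule file is not part of this commit, so treat this as an illustrative sketch rather than the real configuration.

    // Hypothetical types; none of these exist in the repository.
    class Outer {                 // package-private top-level class

        static class Helper {
            // A 'public' modifier on this constructor would be redundant:
            // Helper is only package-visible, so the constructor cannot
            // effectively be more accessible than the class itself.
            Helper() {
            }
        }

        // Nested enums are implicitly static and enum constructors are
        // implicitly private, so 'static' and 'private' would be redundant here.
        enum Mode {
            ON, OFF;

            Mode() {
            }
        }
    }

    interface Handler {
        // 'final' on a parameter of an abstract method declaration has no
        // effect on implementations, so it is reported as redundant.
        void handle(String action);
    }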
@@ -139,6 +139,7 @@ class PrecommitTasks {
         configProperties = [
             suppressions: checkstyleSuppressions
         ]
+        toolVersion = 7.5
     }
     for (String taskName : ['checkstyleMain', 'checkstyleTest']) {
         Task task = project.tasks.findByName(taskName)
@@ -95,7 +95,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
     private final BlockingQueue<List<String>> bulkQueue;
     private final int bulkSize;

-    public LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
+    LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
         this.bulkDataFile = bulkDataFile;
         this.bulkQueue = bulkQueue;
         this.bulkSize = bulkSize;

@@ -143,7 +143,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
     private final BulkRequestExecutor bulkRequestExecutor;
     private final SampleRecorder sampleRecorder;

-    public BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
+    BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
                 SampleRecorder sampleRecorder, BulkRequestExecutor bulkRequestExecutor) {
         this.bulkData = bulkData;
         this.warmupIterations = warmupIterations;

@@ -73,7 +73,7 @@ public final class RestClientBenchmark extends AbstractBenchmark<RestClient> {
     private final RestClient client;
     private final String actionMetaData;

-    public RestBulkRequestExecutor(RestClient client, String index, String type) {
+    RestBulkRequestExecutor(RestClient client, String index, String type) {
         this.client = client;
         this.actionMetaData = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type);
     }

@@ -71,7 +71,7 @@ public final class TransportClientBenchmark extends AbstractBenchmark<TransportC
     private final String indexName;
     private final String typeName;

-    public TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
+    TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
         this.client = client;
         this.indexName = indexName;
         this.typeName = typeName;

@@ -90,7 +90,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
     private final RestRequest request;

-    public BulkRestBuilderListener(RestChannel channel, RestRequest request) {
+    BulkRestBuilderListener(RestChannel channel, RestRequest request) {
         super(channel);
         this.request = request;
     }
@@ -53,7 +53,7 @@ interface HttpAsyncResponseConsumerFactory {

     private final int bufferLimit;

-    public HeapBufferedResponseConsumerFactory(int bufferLimitBytes) {
+    HeapBufferedResponseConsumerFactory(int bufferLimitBytes) {
         this.bufferLimit = bufferLimitBytes;
     }

@@ -705,7 +705,7 @@ public class RestClient implements Closeable {
     public final T hosts;
     public final AuthCache authCache;

-    public HostTuple(final T hosts, final AuthCache authCache) {
+    HostTuple(final T hosts, final AuthCache authCache) {
         this.hosts = hosts;
         this.authCache = authCache;
     }

@@ -219,7 +219,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
     public void testPreemptiveAuthEnabled() throws IOException {
         final String[] methods = { "POST", "PUT", "GET", "DELETE" };

-        try (final RestClient restClient = createRestClient(true, true)) {
+        try (RestClient restClient = createRestClient(true, true)) {
             for (final String method : methods) {
                 final Response response = bodyTest(restClient, method);

@@ -234,7 +234,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
     public void testPreemptiveAuthDisabled() throws IOException {
         final String[] methods = { "POST", "PUT", "GET", "DELETE" };

-        try (final RestClient restClient = createRestClient(true, false)) {
+        try (RestClient restClient = createRestClient(true, false)) {
             for (final String method : methods) {
                 final Response response = bodyTest(restClient, method);
@@ -56,7 +56,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs {
     // Which hit within the shard:
     int hitIndex;

-    public ShardRef(int shardIndex) {
+    ShardRef(int shardIndex) {
         this.shardIndex = shardIndex;
     }

@@ -72,7 +72,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs {
     final FieldComparator<?>[] comparators;
     final int[] reverseMul;

-    public MergeSortQueue(Sort sort, CollapseTopFieldDocs[] shardHits) throws IOException {
+    MergeSortQueue(Sort sort, CollapseTopFieldDocs[] shardHits) throws IOException {
         super(shardHits.length);
         this.shardHits = new ScoreDoc[shardHits.length][];
         for (int shardIDX = 0; shardIDX < shardHits.length; shardIDX++) {

@@ -56,7 +56,7 @@ abstract class CollapsingDocValuesSource<T> {
     private NumericDocValues values;
     private Bits docsWithField;

-    public Numeric(String field) throws IOException {
+    Numeric(String field) throws IOException {
         super(field);
     }

@@ -122,7 +122,7 @@ abstract class CollapsingDocValuesSource<T> {
     private Bits docsWithField;
     private SortedDocValues values;

-    public Keyword(String field) throws IOException {
+    Keyword(String field) throws IOException {
         super(field);
     }
@@ -392,7 +392,7 @@ public long ramBytesUsed() {
     final BytesRefBuilder spare = new BytesRefBuilder();
     private char sepLabel;

-    public EscapingTokenStreamToAutomaton(char sepLabel) {
+    EscapingTokenStreamToAutomaton(char sepLabel) {
         this.sepLabel = sepLabel;
     }

@@ -432,7 +432,7 @@ public long ramBytesUsed() {

     private final boolean hasPayloads;

-    public AnalyzingComparator(boolean hasPayloads) {
+    AnalyzingComparator(boolean hasPayloads) {
         this.hasPayloads = hasPayloads;
     }

@@ -1114,7 +1114,7 @@ public long ramBytesUsed() {
     BytesRef payload;
     long weight;

-    public SurfaceFormAndPayload(BytesRef payload, long cost) {
+    SurfaceFormAndPayload(BytesRef payload, long cost) {
         super();
         this.payload = payload;
         this.weight = cost;
@@ -214,7 +214,7 @@ public final class ExceptionsHelper {
     final String index;
     final Class<? extends Throwable> causeType;

-    public GroupBy(Throwable t) {
+    GroupBy(Throwable t) {
         if (t instanceof ElasticsearchException) {
             final Index index = ((ElasticsearchException) t).getIndex();
             if (index != null) {

@@ -382,7 +382,7 @@ public class ActionModule extends AbstractModule {
     static Map<String, ActionHandler<?, ?>> setupActions(List<ActionPlugin> actionPlugins) {
         // Subclass NamedRegistry for easy registration
         class ActionRegistry extends NamedRegistry<ActionHandler<?, ?>> {
-            public ActionRegistry() {
+            ActionRegistry() {
                 super("action");
             }

@@ -29,5 +29,5 @@ public interface ListenableActionFuture<T> extends ActionFuture<T> {
     /**
      * Add an action listener to be invoked when a response has received.
      */
-    void addListener(final ActionListener<T> listener);
+    void addListener(ActionListener<T> listener);
 }
@@ -217,7 +217,7 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
     private final AtomicInteger counter;
     private final int nodesSize;

-    public BanLock(int nodesSize, Runnable finish) {
+    BanLock(int nodesSize, Runnable finish) {
         counter = new AtomicInteger(0);
         this.finish = finish;
         this.nodesSize = nodesSize;

@@ -268,7 +268,7 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
         this.ban = false;
     }

-    public BanParentTaskRequest() {
+    BanParentTaskRequest() {
     }

     @Override

@@ -47,7 +47,7 @@ public enum SnapshotIndexShardStage {

     private boolean completed;

-    private SnapshotIndexShardStage(byte value, boolean completed) {
+    SnapshotIndexShardStage(byte value, boolean completed) {
         this.value = value;
         this.completed = completed;
     }
@@ -34,7 +34,7 @@ import java.util.List;
  */
 public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {

-    public static enum Feature {
+    public enum Feature {
         ALIASES((byte) 0, "_aliases", "_alias"),
         MAPPINGS((byte) 1, "_mappings", "_mapping"),
         SETTINGS((byte) 2, "_settings");

@@ -52,7 +52,7 @@ public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {
     private final String preferredName;
     private final byte id;

-    private Feature(byte id, String... validNames) {
+    Feature(byte id, String... validNames) {
         assert validNames != null && validNames.length > 0;
         this.id = id;
         this.validNames = Arrays.asList(validNames);

@@ -47,7 +47,7 @@ public abstract class Condition<T> implements NamedWriteable {
         this.name = name;
     }

-    public abstract Result evaluate(final Stats stats);
+    public abstract Result evaluate(Stats stats);

     @Override
     public final String toString() {
@@ -226,7 +226,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
     private final List<NodeGatewayStartedShards> responses;
     private final List<FailedNodeException> failures;

-    public Response(ShardId shardId, List<NodeGatewayStartedShards> responses, List<FailedNodeException> failures) {
+    Response(ShardId shardId, List<NodeGatewayStartedShards> responses, List<FailedNodeException> failures) {
         this.shardId = shardId;
         this.responses = responses;
         this.failures = failures;

@@ -171,7 +171,7 @@ public abstract class BackoffPolicy implements Iterable<TimeValue> {

     private final int numberOfElements;

-    public ConstantBackoff(TimeValue delay, int numberOfElements) {
+    ConstantBackoff(TimeValue delay, int numberOfElements) {
         assert numberOfElements >= 0;
         this.delay = delay;
         this.numberOfElements = numberOfElements;

@@ -188,7 +188,7 @@ public abstract class BackoffPolicy implements Iterable<TimeValue> {
     private final int numberOfElements;
     private int curr;

-    public ConstantBackoffIterator(TimeValue delay, int numberOfElements) {
+    ConstantBackoffIterator(TimeValue delay, int numberOfElements) {
         this.delay = delay;
         this.numberOfElements = numberOfElements;
     }

@@ -212,7 +212,7 @@ public abstract class BackoffPolicy implements Iterable<TimeValue> {
     private final BackoffPolicy delegate;
     private final Runnable onBackoff;

-    public WrappedBackoffPolicy(BackoffPolicy delegate, Runnable onBackoff) {
+    WrappedBackoffPolicy(BackoffPolicy delegate, Runnable onBackoff) {
         this.delegate = delegate;
         this.onBackoff = onBackoff;
     }
@@ -227,7 +227,7 @@ public abstract class BackoffPolicy implements Iterable<TimeValue> {
     private final Iterator<TimeValue> delegate;
     private final Runnable onBackoff;

-    public WrappedBackoffIterator(Iterator<TimeValue> delegate, Runnable onBackoff) {
+    WrappedBackoffIterator(Iterator<TimeValue> delegate, Runnable onBackoff) {
         this.delegate = delegate;
         this.onBackoff = onBackoff;
     }

@@ -59,7 +59,7 @@ abstract class BulkRequestHandler {
     private final BulkProcessor.Listener listener;
     private final BackoffPolicy backoffPolicy;

-    public SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) {
+    SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) {
         super(client);
         this.backoffPolicy = backoffPolicy;
         this.listener = listener;

@@ -102,7 +102,7 @@ public class Retry {
     private volatile BulkRequest currentBulkRequest;
     private volatile ScheduledFuture<?> scheduledRequestFuture;

-    public AbstractRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
+    AbstractRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
         this.retryOnThrowable = retryOnThrowable;
         this.backoff = backoffPolicy.iterator();
         this.client = client;

@@ -213,7 +213,7 @@ public class Retry {
     }

     static class AsyncRetryHandler extends AbstractRetryHandler {
-        public AsyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
+        AsyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
             super(retryOnThrowable, backoffPolicy, client, listener);
         }
     }
@@ -226,7 +226,7 @@ public class Retry {
         return new SyncRetryHandler(retryOnThrowable, backoffPolicy, client, actionFuture);
     }

-    public SyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture<BulkResponse> actionFuture) {
+    SyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture<BulkResponse> actionFuture) {
         super(retryOnThrowable, backoffPolicy, client, actionFuture);
         this.actionFuture = actionFuture;
     }

@@ -306,7 +306,7 @@ public class WorkingBulkByScrollTask extends BulkByScrollTask implements Success
     private final AtomicBoolean hasRun = new AtomicBoolean(false);
     private final AbstractRunnable delegate;

-    public RunOnce(AbstractRunnable delegate) {
+    RunOnce(AbstractRunnable delegate) {
         this.delegate = delegate;
     }

@@ -429,7 +429,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
     private final SearchPhaseController searchPhaseController;
     private final AtomicArray<QuerySearchResultProvider> queryResults;

-    public FetchPhase(AtomicArray<QuerySearchResultProvider> queryResults,
+    FetchPhase(AtomicArray<QuerySearchResultProvider> queryResults,
                SearchPhaseController searchPhaseController) {
         this.fetchResults = new AtomicArray<>(queryResults.length());
         this.searchPhaseController = searchPhaseController;
@@ -31,7 +31,7 @@ class ParsedScrollId {

     private final ScrollIdForNode[] context;

-    public ParsedScrollId(String source, String type, ScrollIdForNode[] context) {
+    ParsedScrollId(String source, String type, ScrollIdForNode[] context) {
         this.source = source;
         this.type = type;
         this.context = context;

@@ -23,7 +23,7 @@ class ScrollIdForNode {
     private final String node;
     private final long scrollId;

-    public ScrollIdForNode(String node, long scrollId) {
+    ScrollIdForNode(String node, long scrollId) {
         this.node = node;
         this.scrollId = scrollId;
     }

@@ -74,7 +74,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
     private final AtomicArray<DfsSearchResult> firstResults;
     private final Function<AtomicArray<QuerySearchResultProvider>, CheckedRunnable<Exception>> nextPhaseFactory;

-    public DfsQueryPhase(AtomicArray<DfsSearchResult> firstResults,
+    DfsQueryPhase(AtomicArray<DfsSearchResult> firstResults,
                SearchPhaseController searchPhaseController,
                Function<AtomicArray<QuerySearchResultProvider>, CheckedRunnable<Exception>> nextPhaseFactory) {
         this.queryResult = new AtomicArray<>(firstResults.length());
@@ -197,7 +197,7 @@ public class SearchTransportService extends AbstractLifecycleComponent {
     static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest {
         private OriginalIndices originalIndices;

-        public SearchFreeContextRequest() {
+        SearchFreeContextRequest() {
         }

         SearchFreeContextRequest(SearchRequest request, long id) {

@@ -47,7 +47,7 @@ public interface ActionFilter {
     * filter chain. This base class should serve any action filter implementations that doesn't require
     * to apply async filtering logic.
     */
-    public abstract static class Simple extends AbstractComponent implements ActionFilter {
+    abstract class Simple extends AbstractComponent implements ActionFilter {

        protected Simple(Settings settings) {
            super(settings);

@@ -33,5 +33,5 @@ public interface ActionFilterChain<Request extends ActionRequest, Response exten
     * Continue processing the request. Should only be called if a response has not been sent through
     * the given {@link ActionListener listener}
     */
-    void proceed(Task task, final String action, final Request request, final ActionListener<Response> listener);
+    void proceed(Task task, String action, Request request, ActionListener<Response> listener);
 }
@@ -522,10 +522,10 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
     protected List<BroadcastShardOperationFailedException> exceptions;
     protected List<ShardOperationResult> results;

-    public NodeResponse() {
+    NodeResponse() {
     }

-    public NodeResponse(String nodeId,
+    NodeResponse(String nodeId,
                  int totalShards,
                  List<ShardOperationResult> results,
                  List<BroadcastShardOperationFailedException> exceptions) {

@@ -52,5 +52,5 @@ public abstract class TransportClusterInfoAction<Request extends ClusterInfoRequ
         doMasterOperation(request, concreteIndices, state, listener);
     }

-    protected abstract void doMasterOperation(Request request, String[] concreteIndices, ClusterState state, final ActionListener<Response> listener);
+    protected abstract void doMasterOperation(Request request, String[] concreteIndices, ClusterState state, ActionListener<Response> listener);
 }

@@ -575,7 +575,7 @@ public abstract class TransportReplicationAction<
     private class ResponseListener implements ActionListener<TransportResponse.Empty> {
         private final ReplicaResponse replicaResponse;

-        public ResponseListener(ReplicaResponse replicaResponse) {
+        ResponseListener(ReplicaResponse replicaResponse) {
             this.replicaResponse = replicaResponse;
         }
@@ -402,10 +402,10 @@ public abstract class TransportTasksAction<
     protected List<TaskOperationFailure> exceptions;
     protected List<TaskResponse> results;

-    public NodeTasksResponse() {
+    NodeTasksResponse() {
     }

-    public NodeTasksResponse(String nodeId,
+    NodeTasksResponse(String nodeId,
                  List<TaskResponse> results,
                  List<TaskOperationFailure> exceptions) {
         this.nodeId = nodeId;

@@ -200,7 +200,7 @@ public final class TermVectorsFields extends Fields {
     private long sumDocFreq;
     private int docCount;

-    public TermVector(BytesReference termVectors, long readOffset) throws IOException {
+    TermVector(BytesReference termVectors, long readOffset) throws IOException {
         this.perFieldTermVectorInput = termVectors.streamInput();
         this.readOffset = readOffset;
         reset();

@@ -577,7 +577,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
         out.writeLong(version);
     }

-    public static enum Flag {
+    public enum Flag {
         // Do not change the order of these flags we use
         // the ordinal for encoding! Only append to the end!
         Positions, Offsets, Payloads, FieldStatistics, TermStatistics
@@ -207,7 +207,7 @@ final class BootstrapChecks {

     static class OsXFileDescriptorCheck extends FileDescriptorCheck {

-        public OsXFileDescriptorCheck() {
+        OsXFileDescriptorCheck() {
             // see constant OPEN_MAX defined in
             // /usr/include/sys/syslimits.h on OS X and its use in JVM
             // initialization in int os:init_2(void) defined in the JVM

@@ -258,7 +258,7 @@ final class BootstrapChecks {

     private final boolean mlockallSet;

-    public MlockallCheck(final boolean mlockAllSet) {
+    MlockallCheck(final boolean mlockAllSet) {
         this.mlockallSet = mlockAllSet;
     }

@@ -360,7 +360,7 @@ final class BootstrapChecks {
     // visible for testing
     long getMaxMapCount(Logger logger) {
         final Path path = getProcSysVmMaxMapCountPath();
-        try (final BufferedReader bufferedReader = getBufferedReader(path)) {
+        try (BufferedReader bufferedReader = getBufferedReader(path)) {
             final String rawProcSysVmMaxMapCount = readProcSysVmMaxMapCount(bufferedReader);
             if (rawProcSysVmMaxMapCount != null) {
                 try {
@@ -48,7 +48,7 @@ final class ESPolicy extends Policy {
     final PermissionCollection dynamic;
     final Map<String,Policy> plugins;

-    public ESPolicy(PermissionCollection dynamic, Map<String,Policy> plugins, boolean filterBadDefaults) {
+    ESPolicy(PermissionCollection dynamic, Map<String,Policy> plugins, boolean filterBadDefaults) {
         this.template = Security.readPolicy(getClass().getResource(POLICY_RESOURCE), JarHell.parseClassPath());
         this.untrusted = Security.readPolicy(getClass().getResource(UNTRUSTED_RESOURCE), new URL[0]);
         if (filterBadDefaults) {

@@ -150,7 +150,7 @@ final class ESPolicy extends Policy {
     * @param preImplies a test that is applied to a desired permission before checking if the bad default permission that
     * this instance wraps implies the desired permission
     */
-    public BadDefaultPermission(final Permission badDefaultPermission, final Predicate<Permission> preImplies) {
+    BadDefaultPermission(final Permission badDefaultPermission, final Predicate<Permission> preImplies) {
         super(badDefaultPermission.getName());
         this.badDefaultPermission = badDefaultPermission;
         this.preImplies = preImplies;

@@ -109,7 +109,7 @@ final class JNAKernel32Library {

     private final ConsoleCtrlHandler handler;

-    public NativeHandlerCallback(ConsoleCtrlHandler handler) {
+    NativeHandlerCallback(ConsoleCtrlHandler handler) {
         this.handler = handler;
     }
@@ -155,11 +155,11 @@ final class JNAKernel32Library {

     public static class SizeT extends IntegerType {

-        public SizeT() {
+        SizeT() {
             this(0);
         }

-        public SizeT(long value) {
+        SizeT(long value) {
             super(Native.SIZE_T_SIZE, value);
         }

@@ -154,7 +154,7 @@ final class SystemCallFilter {
     public short len; // number of filters
     public Pointer filter; // filters

-    public SockFProg(SockFilter filters[]) {
+    SockFProg(SockFilter filters[]) {
         len = (short) filters.length;
         // serialize struct sock_filter * explicitly, its less confusing than the JNA magic we would need
         Memory filter = new Memory(len * 8);

@@ -65,8 +65,8 @@ public abstract class Command implements Closeable {
             this.close();
         } catch (final IOException e) {
             try (
-                final StringWriter sw = new StringWriter();
-                final PrintWriter pw = new PrintWriter(sw)) {
+                StringWriter sw = new StringWriter();
+                PrintWriter pw = new PrintWriter(sw)) {
                 e.printStackTrace(pw);
                 terminal.println(sw.toString());
             } catch (final IOException impossible) {
@@ -41,7 +41,7 @@ public interface ElasticsearchClient {
     * @return A future allowing to get back the response.
     */
    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
-            final Action<Request, Response, RequestBuilder> action, final Request request);
+            Action<Request, Response, RequestBuilder> action, Request request);

    /**
     * Executes a generic action, denoted by an {@link Action}.

@@ -54,7 +54,7 @@ public interface ElasticsearchClient {
     * @param <RequestBuilder> The request builder type.
     */
    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
-            final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener);

    /**
     * Prepares a request builder to execute, specified by {@link Action}.

@@ -66,7 +66,7 @@ public interface ElasticsearchClient {
     * @return The request builder, that can, at a later stage, execute the request.
     */
    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
-            final Action<Request, Response, RequestBuilder> action);
+            Action<Request, Response, RequestBuilder> action);

    /**
     * Returns the threadpool used to execute requests on this client
@@ -401,7 +401,7 @@ public abstract class AbstractClient extends AbstractComponent implements Client
         doExecute(action, request, listener);
     }

-    protected abstract <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
+    protected abstract <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener);

     @Override
     public ActionFuture<IndexResponse> index(final IndexRequest request) {

@@ -672,7 +672,7 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     private final ClusterAdmin clusterAdmin;
     private final IndicesAdmin indicesAdmin;

-    public Admin(ElasticsearchClient client) {
+    Admin(ElasticsearchClient client) {
         this.clusterAdmin = new ClusterAdmin(client);
         this.indicesAdmin = new IndicesAdmin(client);
     }

@@ -692,7 +692,7 @@ public abstract class AbstractClient extends AbstractComponent implements Client

     private final ElasticsearchClient client;

-    public ClusterAdmin(ElasticsearchClient client) {
+    ClusterAdmin(ElasticsearchClient client) {
         this.client = client;
     }

@@ -1218,7 +1218,7 @@ public abstract class AbstractClient extends AbstractComponent implements Client

     private final ElasticsearchClient client;

-    public IndicesAdmin(ElasticsearchClient client) {
+    IndicesAdmin(ElasticsearchClient client) {
         this.client = client;
     }
@@ -264,7 +264,7 @@ final class TransportClientNodesService extends AbstractComponent implements Clo

     private volatile int i;

-    public RetryListener(NodeListenerCallback<Response> callback, ActionListener<Response> listener,
+    RetryListener(NodeListenerCallback<Response> callback, ActionListener<Response> listener,
                List<DiscoveryNode> nodes, int index, TransportClient.HostFailureListener hostFailureListener) {
         this.callback = callback;
         this.listener = listener;

@@ -52,21 +52,21 @@ public abstract class AbstractDiffable<T extends Diffable<T>> implements Diffabl
     /**
      * Creates simple diff with changes
      */
-    public CompleteDiff(T part) {
+    CompleteDiff(T part) {
         this.part = part;
     }

     /**
      * Creates simple diff without changes
      */
-    public CompleteDiff() {
+    CompleteDiff() {
         this.part = null;
     }

     /**
      * Read simple diff from the stream
      */
-    public CompleteDiff(Reader<T> reader, StreamInput in) throws IOException {
+    CompleteDiff(Reader<T> reader, StreamInput in) throws IOException {
         if (in.readBoolean()) {
             this.part = reader.read(in);
         } else {
@@ -64,7 +64,7 @@ public abstract class AbstractNamedDiffable<T extends NamedDiffable<T>> implemen
     /**
      * Creates simple diff with changes
      */
-    public CompleteNamedDiff(T part) {
+    CompleteNamedDiff(T part) {
         this.part = part;
         this.name = part.getWriteableName();
         this.minimalSupportedVersion = part.getMinimalSupportedVersion();

@@ -73,7 +73,7 @@ public abstract class AbstractNamedDiffable<T extends NamedDiffable<T>> implemen
     /**
      * Creates simple diff without changes
      */
-    public CompleteNamedDiff(String name, Version minimalSupportedVersion) {
+    CompleteNamedDiff(String name, Version minimalSupportedVersion) {
         this.part = null;
         this.name = name;
         this.minimalSupportedVersion = minimalSupportedVersion;

@@ -82,7 +82,7 @@ public abstract class AbstractNamedDiffable<T extends NamedDiffable<T>> implemen
     /**
      * Read simple diff from the stream
      */
-    public CompleteNamedDiff(Class<? extends T> tClass, String name, StreamInput in) throws IOException {
+    CompleteNamedDiff(Class<? extends T> tClass, String name, StreamInput in) throws IOException {
         if (in.readBoolean()) {
             this.part = in.readNamedWriteable(tClass, name);
             this.minimalSupportedVersion = part.getMinimalSupportedVersion();

@@ -713,7 +713,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {

     private final Diff<ImmutableOpenMap<String, Custom>> customs;

-    public ClusterStateDiff(ClusterState before, ClusterState after) {
+    ClusterStateDiff(ClusterState before, ClusterState after) {
         fromUuid = before.stateUUID;
         toUuid = after.stateUUID;
         toVersion = after.version;
@@ -725,7 +725,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
         customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER);
     }

-    public ClusterStateDiff(StreamInput in, DiscoveryNode localNode) throws IOException {
+    ClusterStateDiff(StreamInput in, DiscoveryNode localNode) throws IOException {
         clusterName = new ClusterName(in);
         fromUuid = in.readString();
         toUuid = in.readString();

@@ -246,7 +246,7 @@ public class ClusterStateObserver {
     private final String masterNodeId;
     private final long version;

-    public StoredState(ClusterState clusterState) {
+    StoredState(ClusterState clusterState) {
         this.masterNodeId = clusterState.nodes().getMasterNodeId();
         this.version = clusterState.version();
     }

@@ -271,7 +271,7 @@ public class ClusterStateObserver {
     public final Listener listener;
     public final Predicate<ClusterState> statePredicate;

-    public ObservingContext(Listener listener, Predicate<ClusterState> statePredicate) {
+    ObservingContext(Listener listener, Predicate<ClusterState> statePredicate) {
         this.listener = listener;
         this.statePredicate = statePredicate;
     }
@@ -166,7 +166,7 @@ public final class DiffableUtils {
         super(in, keySerializer, valueSerializer);
     }

-    public JdkMapDiff(Map<K, T> before, Map<K, T> after,
+    JdkMapDiff(Map<K, T> before, Map<K, T> after,
                KeySerializer<K> keySerializer, ValueSerializer<K, T> valueSerializer) {
         super(keySerializer, valueSerializer);
         assert after != null && before != null;

@@ -298,7 +298,7 @@ public final class DiffableUtils {
         super(in, keySerializer, valueSerializer);
     }

-    public ImmutableOpenIntMapDiff(ImmutableOpenIntMap<T> before, ImmutableOpenIntMap<T> after,
+    ImmutableOpenIntMapDiff(ImmutableOpenIntMap<T> before, ImmutableOpenIntMap<T> after,
                KeySerializer<Integer> keySerializer, ValueSerializer<Integer, T> valueSerializer) {
         super(keySerializer, valueSerializer);
         assert after != null && before != null;

@@ -199,7 +199,7 @@ public class ShardStateAction extends AbstractComponent {
     private final ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor;
     private final Logger logger;

-    public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, Logger logger) {
+    ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, Logger logger) {
         this.clusterService = clusterService;
         this.shardFailedClusterStateTaskExecutor = shardFailedClusterStateTaskExecutor;
         this.logger = logger;

@@ -365,7 +365,7 @@ public class ShardStateAction extends AbstractComponent {
     private final ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor;
     private final Logger logger;

-    public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, Logger logger) {
+    ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, Logger logger) {
         this.clusterService = clusterService;
         this.shardStartedClusterStateTaskExecutor = shardStartedClusterStateTaskExecutor;
         this.logger = logger;
@@ -121,7 +121,7 @@ public interface AliasOrIndex {
     }

     @Override
-    public final void remove() {
+    public void remove() {
         throw new UnsupportedOperationException();
     }

@@ -139,7 +139,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContent {
     public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE));
     public static final ClusterBlock INDEX_METADATA_BLOCK = new ClusterBlock(9, "index metadata (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.METADATA_WRITE, ClusterBlockLevel.METADATA_READ));

-    public static enum State {
+    public enum State {
         OPEN((byte) 0),
         CLOSE((byte) 1);

@@ -620,7 +620,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContent {
     private final Diff<ImmutableOpenMap<String, Custom>> customs;
     private final Diff<ImmutableOpenIntMap<Set<String>>> inSyncAllocationIds;

-    public IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) {
+    IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) {
         index = after.index.getName();
         version = after.version;
         routingNumShards = after.routingNumShards;

@@ -634,7 +634,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContent {
             DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance());
     }

-    public IndexMetaDataDiff(StreamInput in) throws IOException {
+    IndexMetaDataDiff(StreamInput in) throws IOException {
         index = in.readString();
         routingNumShards = in.readInt();
         version = in.readLong();
@@ -502,11 +502,11 @@ public class IndexNameExpressionResolver extends AbstractComponent {
         this(state, options, System.currentTimeMillis(), preserveAliases);
     }

-    public Context(ClusterState state, IndicesOptions options, long startTime) {
+    Context(ClusterState state, IndicesOptions options, long startTime) {
         this(state, options, startTime, false);
     }

-    public Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) {
+    Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) {
         this.state = state;
         this.options = options;
         this.startTime = startTime;

@@ -754,7 +754,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
     private final String defaultDateFormatterPattern;
     private final DateTimeFormatter defaultDateFormatter;

-    public DateMathExpressionResolver(Settings settings) {
+    DateMathExpressionResolver(Settings settings) {
         String defaultTimeZoneId = settings.get("date_math_expression_resolver.default_time_zone", "UTC");
         this.defaultTimeZone = DateTimeZone.forID(defaultTimeZoneId);
         defaultDateFormatterPattern = settings.get("date_math_expression_resolver.default_date_format", "YYYY.MM.dd");

@@ -600,7 +600,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
     private Diff<ImmutableOpenMap<String, IndexTemplateMetaData>> templates;
     private Diff<ImmutableOpenMap<String, Custom>> customs;

-    public MetaDataDiff(MetaData before, MetaData after) {
+    MetaDataDiff(MetaData before, MetaData after) {
         clusterUUID = after.clusterUUID;
         version = after.version;
         transientSettings = after.transientSettings;

@@ -610,7 +610,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
         customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER);
     }

-    public MetaDataDiff(StreamInput in) throws IOException {
+    MetaDataDiff(StreamInput in) throws IOException {
         clusterUUID = in.readString();
         version = in.readLong();
         transientSettings = Settings.readSettingsFromStream(in);
@@ -378,12 +378,12 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi

     private final Diff<ImmutableOpenMap<String, IndexRoutingTable>> indicesRouting;

-    public RoutingTableDiff(RoutingTable before, RoutingTable after) {
+    RoutingTableDiff(RoutingTable before, RoutingTable after) {
         version = after.version;
         indicesRouting = DiffableUtils.diff(before.indicesRouting, after.indicesRouting, DiffableUtils.getStringKeySerializer());
     }

-    public RoutingTableDiff(StreamInput in) throws IOException {
+    RoutingTableDiff(StreamInput in) throws IOException {
         version = in.readLong();
         indicesRouting = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexRoutingTable::readFrom,
             IndexRoutingTable::readDiffFrom);

@@ -1052,7 +1052,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
     private int numShards = 0;
     private final RoutingNode routingNode;

-    public ModelNode(RoutingNode routingNode) {
+    ModelNode(RoutingNode routingNode) {
         this.routingNode = routingNode;
     }

@@ -1130,7 +1130,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
     private final Set<ShardRouting> shards = new HashSet<>(4); // expect few shards of same index to be allocated on same node
     private int highestPrimary = -1;

-    public ModelIndex(String id) {
+    ModelIndex(String id) {
         this.id = id;
     }

@@ -1187,7 +1187,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
     private final Balancer balancer;
     private float pivotWeight;

-    public NodeSorter(ModelNode[] modelNodes, WeightFunction function, Balancer balancer) {
+    NodeSorter(ModelNode[] modelNodes, WeightFunction function, Balancer balancer) {
         this.function = function;
         this.balancer = balancer;
         this.modelNodes = modelNodes;
@@ -582,7 +582,7 @@ public class ClusterService extends AbstractLifecycleComponent {
     abstract static class SourcePrioritizedRunnable extends PrioritizedRunnable {
         protected final String source;

-        public SourcePrioritizedRunnable(Priority priority, String source) {
+        SourcePrioritizedRunnable(Priority priority, String source) {
             super(priority);
             this.source = source;
         }

@@ -892,7 +892,7 @@ public class ClusterService extends AbstractLifecycleComponent {
     public final List<UpdateTask> nonFailedTasks;
     public final Map<Object, ClusterStateTaskExecutor.TaskResult> executionResults;

-    public TaskOutputs(TaskInputs taskInputs, ClusterState previousClusterState,
+    TaskOutputs(TaskInputs taskInputs, ClusterState previousClusterState,
                ClusterState newClusterState, List<UpdateTask> nonFailedTasks,
                Map<Object, ClusterStateTaskExecutor.TaskResult> executionResults) {
         this.taskInputs = taskInputs;

@@ -982,7 +982,7 @@ public class ClusterService extends AbstractLifecycleComponent {
     private final ClusterStateTaskListener listener;
     private final Logger logger;

-    public SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
+    SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
         this.listener = listener;
         this.logger = logger;
     }

@@ -1029,7 +1029,7 @@ public class ClusterService extends AbstractLifecycleComponent {
     private final AckedClusterStateTaskListener listener;
     private final Logger logger;

-    public SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
+    SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
         super(listener, logger);
         this.listener = listener;
         this.logger = logger;
@@ -37,7 +37,7 @@ final class BytesReferenceStreamInput extends StreamInput {
     private final int length; // the total size of the stream
     private int offset; // the current position of the stream

-    public BytesReferenceStreamInput(BytesRefIterator iterator, final int length) throws IOException {
+    BytesReferenceStreamInput(BytesRefIterator iterator, final int length) throws IOException {
         this.iterator = iterator;
         this.slice = iterator.next();
         this.length = length;

@@ -166,7 +166,7 @@ public class Cache<K, V> {
     Entry<K, V> after;
     State state = State.NEW;

-    public Entry(K key, V value, long writeTime) {
+    Entry(K key, V value, long writeTime) {
         this.key = key;
         this.value = value;
         this.writeTime = this.accessTime = writeTime;

@@ -433,7 +433,7 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
     private final Deque<Map.Entry<K, V>> entries;
     private final Deque<Node<K, V>> nodes;

-    public EntryIterator(Node<K, V> node) {
+    EntryIterator(Node<K, V> node) {
         entries = new ArrayDeque<>();
         nodes = new ArrayDeque<>();
         node.visit(entries, nodes);

@@ -36,7 +36,7 @@ public class Iterators {
     private final Iterator<? extends T>[] iterators;
     private int index = 0;

-    public ConcatenatedIterator(Iterator<? extends T>... iterators) {
+    ConcatenatedIterator(Iterator<? extends T>... iterators) {
         if (iterators == null) {
             throw new NullPointerException("iterators");
         }
@@ -31,7 +31,7 @@ public enum SpatialStrategy implements Writeable {

     private final String strategyName;

-    private SpatialStrategy(String strategyName) {
+    SpatialStrategy(String strategyName) {
         this.strategyName = strategyName;
     }

@@ -381,7 +381,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri
         }
     }

-    public static enum Orientation {
+    public enum Orientation {
         LEFT,
         RIGHT;

@@ -427,7 +427,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri
     /**
      * Enumeration that lists all {@link GeoShapeType}s that can be handled
      */
-    public static enum GeoShapeType {
+    public enum GeoShapeType {
         POINT("point"),
         MULTIPOINT("multipoint"),
         LINESTRING("linestring"),

@@ -440,7 +440,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri

     private final String shapename;

-    private GeoShapeType(String shapename) {
+    GeoShapeType(String shapename) {
         this.shapename = shapename;
     }
@@ -30,7 +30,7 @@ class ConstantFactory<T> implements InternalFactory<T> {

     private final Initializable<T> initializable;

-    public ConstantFactory(Initializable<T> initializable) {
+    ConstantFactory(Initializable<T> initializable) {
         this.initializable = initializable;
     }

@@ -34,7 +34,7 @@ class DeferredLookups implements Lookups {
     private final InjectorImpl injector;
     private final List<Element> lookups = new ArrayList<>();

-    public DeferredLookups(InjectorImpl injector) {
+    DeferredLookups(InjectorImpl injector) {
         this.injector = injector;
     }

@@ -36,7 +36,7 @@ final class EncounterImpl<T> implements TypeEncounter<T> {
     private List<InjectionListener<? super T>> injectionListeners; // lazy
     private boolean valid = true;

-    public EncounterImpl(Errors errors, Lookups lookups) {
+    EncounterImpl(Errors errors, Lookups lookups) {
         this.errors = errors;
         this.lookups = lookups;
     }

@@ -33,7 +33,7 @@ class ExposedKeyFactory<T> implements InternalFactory<T>, BindingProcessor.Creat
     private final PrivateElements privateElements;
     private BindingImpl<T> delegate;

-    public ExposedKeyFactory(Key<T> key, PrivateElements privateElements) {
+    ExposedKeyFactory(Key<T> key, PrivateElements privateElements) {
         this.key = key;
         this.privateElements = privateElements;
     }
@@ -115,7 +115,7 @@ class Initializer {
     private final Object source;
     private MembersInjectorImpl<T> membersInjector;

-    public InjectableReference(InjectorImpl injector, T instance, Object source) {
+    InjectableReference(InjectorImpl injector, T instance, Object source) {
         this.injector = injector;
         this.instance = Objects.requireNonNull(instance, "instance");
         this.source = Objects.requireNonNull(source, "source");

@@ -86,7 +86,7 @@ class InjectionRequestProcessor extends AbstractProcessor {
     final StaticInjectionRequest request;
     List<SingleMemberInjector> memberInjectors;

-    public StaticInjection(InjectorImpl injector, StaticInjectionRequest request) {
+    StaticInjection(InjectorImpl injector, StaticInjectionRequest request) {
         this.injector = injector;
         this.source = request.getSource();
         this.request = request;

@@ -33,11 +33,11 @@ class InternalFactoryToProviderAdapter<T> implements InternalFactory<T> {
     private final Initializable<Provider<? extends T>> initializable;
     private final Object source;

-    public InternalFactoryToProviderAdapter(Initializable<Provider<? extends T>> initializable) {
+    InternalFactoryToProviderAdapter(Initializable<Provider<? extends T>> initializable) {
         this(initializable, SourceProvider.UNKNOWN_SOURCE);
     }

-    public InternalFactoryToProviderAdapter(
+    InternalFactoryToProviderAdapter(
             Initializable<Provider<? extends T>> initializable, Object source) {
         this.initializable = Objects.requireNonNull(initializable, "provider");
         this.source = Objects.requireNonNull(source, "source");

@@ -380,7 +380,7 @@ public class Key<T> {
         }
     }

-    static enum NullAnnotationStrategy implements AnnotationStrategy {
+    enum NullAnnotationStrategy implements AnnotationStrategy {
         INSTANCE;

         @Override
@@ -30,7 +30,7 @@ class ProviderToInternalFactoryAdapter<T> implements Provider<T> {
     private final InjectorImpl injector;
     private final InternalFactory<? extends T> internalFactory;

-    public ProviderToInternalFactoryAdapter(InjectorImpl injector,
+    ProviderToInternalFactoryAdapter(InjectorImpl injector,
                InternalFactory<? extends T> internalFactory) {
         this.injector = injector;
         this.internalFactory = internalFactory;

@@ -34,7 +34,7 @@ class SingleFieldInjector implements SingleMemberInjector {
     final Dependency<?> dependency;
     final InternalFactory<?> factory;

-    public SingleFieldInjector(InjectorImpl injector, InjectionPoint injectionPoint, Errors errors)
+    SingleFieldInjector(InjectorImpl injector, InjectionPoint injectionPoint, Errors errors)
             throws ErrorsException {
         this.injectionPoint = injectionPoint;
         this.field = (Field) injectionPoint.getMember();

@@ -34,7 +34,7 @@ class SingleMethodInjector implements SingleMemberInjector {
     final SingleParameterInjector<?>[] parameterInjectors;
     final InjectionPoint injectionPoint;

-    public SingleMethodInjector(InjectorImpl injector, InjectionPoint injectionPoint, Errors errors)
+    SingleMethodInjector(InjectorImpl injector, InjectionPoint injectionPoint, Errors errors)
             throws ErrorsException {
         this.injectionPoint = injectionPoint;
         final Method method = (Method) injectionPoint.getMember();

@@ -43,7 +43,7 @@ class AssistedConstructor<T> {
     private final List<Parameter> allParameters;

     @SuppressWarnings("unchecked")
-    public AssistedConstructor(Constructor<T> constructor, List<TypeLiteral<?>> parameterTypes) {
+    AssistedConstructor(Constructor<T> constructor, List<TypeLiteral<?>> parameterTypes) {
         this.constructor = constructor;

         Annotation[][] annotations = constructor.getParameterAnnotations();
@@ -39,7 +39,7 @@ class Parameter {
     private final Annotation bindingAnnotation;
     private final boolean isProvider;

-    public Parameter(Type type, Annotation[] annotations) {
+    Parameter(Type type, Annotation[] annotations) {
         this.type = type;
         this.bindingAnnotation = getBindingAnnotation(annotations);
         this.isAssisted = hasAssistedAnnotation(annotations);

@@ -34,11 +34,11 @@ class ParameterListKey {

     private final List<Type> paramList;

-    public ParameterListKey(List<Type> paramList) {
+    ParameterListKey(List<Type> paramList) {
         this.paramList = new ArrayList<>(paramList);
     }

-    public ParameterListKey(Type[] types) {
+    ParameterListKey(Type[] types) {
         this(Arrays.asList(types));
     }

@@ -24,7 +24,7 @@ package org.elasticsearch.common.inject.internal;
  * @author Bob Lee
  */
 class NullOutputException extends NullPointerException {
-    public NullOutputException(String s) {
+    NullOutputException(String s) {
         super(s);
     }
 }
@@ -36,7 +36,7 @@ public abstract class AbstractMatcher<T> implements Matcher<T> {
     private static class AndMatcher<T> extends AbstractMatcher<T> {
         private final Matcher<? super T> a, b;

-        public AndMatcher(Matcher<? super T> a, Matcher<? super T> b) {
+        AndMatcher(Matcher<? super T> a, Matcher<? super T> b) {
             this.a = a;
             this.b = b;
         }

@@ -67,7 +67,7 @@ public abstract class AbstractMatcher<T> implements Matcher<T> {
     private static class OrMatcher<T> extends AbstractMatcher<T> {
         private final Matcher<? super T> a, b;

-        public OrMatcher(Matcher<? super T> a, Matcher<? super T> b) {
+        OrMatcher(Matcher<? super T> a, Matcher<? super T> b) {
             this.a = a;
             this.b = b;
         }

@@ -113,7 +113,7 @@ public class Matchers {
     private static class AnnotatedWithType extends AbstractMatcher<AnnotatedElement> {
         private final Class<? extends Annotation> annotationType;

-        public AnnotatedWithType(Class<? extends Annotation> annotationType) {
+        AnnotatedWithType(Class<? extends Annotation> annotationType) {
             this.annotationType = Objects.requireNonNull(annotationType, "annotation type");
             checkForRuntimeRetention(annotationType);
         }

@@ -152,7 +152,7 @@ public class Matchers {
     private static class AnnotatedWith extends AbstractMatcher<AnnotatedElement> {
         private final Annotation annotation;

-        public AnnotatedWith(Annotation annotation) {
+        AnnotatedWith(Annotation annotation) {
             this.annotation = Objects.requireNonNull(annotation, "annotation");
             checkForRuntimeRetention(annotation.annotationType());
         }
@@ -191,7 +191,7 @@ public class Matchers {
     private static class SubclassesOf extends AbstractMatcher<Class> {
         private final Class<?> superclass;

-        public SubclassesOf(Class<?> superclass) {
+        SubclassesOf(Class<?> superclass) {
             this.superclass = Objects.requireNonNull(superclass, "superclass");
         }

@@ -227,7 +227,7 @@ public class Matchers {
     private static class Only extends AbstractMatcher<Object> {
         private final Object value;

-        public Only(Object value) {
+        Only(Object value) {
             this.value = Objects.requireNonNull(value, "value");
         }

@@ -263,7 +263,7 @@ public class Matchers {
     private static class IdenticalTo extends AbstractMatcher<Object> {
         private final Object value;

-        public IdenticalTo(Object value) {
+        IdenticalTo(Object value) {
             this.value = Objects.requireNonNull(value, "value");
         }

@@ -301,7 +301,7 @@ public class Matchers {
     private final transient Package targetPackage;
     private final String packageName;

-    public InPackage(Package targetPackage) {
+    InPackage(Package targetPackage) {
         this.targetPackage = Objects.requireNonNull(targetPackage, "package");
         this.packageName = targetPackage.getName();
     }
@ -345,7 +345,7 @@ public class Matchers {
|
|||
private static class InSubpackage extends AbstractMatcher<Class> {
|
||||
private final String targetPackageName;
|
||||
|
||||
public InSubpackage(String targetPackageName) {
|
||||
InSubpackage(String targetPackageName) {
|
||||
this.targetPackageName = targetPackageName;
|
||||
}
|
||||
|
||||
|
@@ -384,7 +384,7 @@ public class Matchers {
    private static class Returns extends AbstractMatcher<Method> {
        private final Matcher<? super Class<?>> returnType;
 
-        public Returns(Matcher<? super Class<?>> returnType) {
+        Returns(Matcher<? super Class<?>> returnType) {
            this.returnType = Objects.requireNonNull(returnType, "return type matcher");
        }
 
@@ -23,7 +23,7 @@ class NamedImpl implements Named {
 
    private final String value;
 
-    public NamedImpl(String value) {
+    NamedImpl(String value) {
        this.value = Objects.requireNonNull(value, "name");
    }
 
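
All of the constructor changes above follow the same reasoning: the enclosing types are private nested or package-private classes, and a constructor can never be more accessible than the class that declares it, so the public modifier adds nothing. A minimal sketch of the idea, using a hypothetical Outer/Helper pair rather than anything from this commit:

public class Outer {

    // Helper is private, so no code outside Outer can name it, let alone call its constructor.
    private static class Helper {
        private final String name;

        // Package-private is already the widest access that matters here;
        // "public" on this constructor would grant nothing extra.
        Helper(String name) {
            this.name = name;
        }

        String name() {
            return name;
        }
    }

    public static String describe(String name) {
        return new Helper(name).name(); // only Outer ever constructs Helper
    }

    public static void main(String[] args) {
        System.out.println(describe("demo")); // prints: demo
    }
}
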
@@ -34,7 +34,7 @@ public interface Writeable {
     /**
      * Write this into the {@linkplain StreamOutput}.
      */
-    void writeTo(final StreamOutput out) throws IOException;
+    void writeTo(StreamOutput out) throws IOException;
 
     /**
      * Reference to a method that can write some object to a {@link StreamOutput}.
@@ -60,7 +60,7 @@ public interface Writeable {
          * @param out Output to write the {@code value} too
          * @param value The value to add
          */
-        void write(final StreamOutput out, final V value) throws IOException;
+        void write(StreamOutput out, V value) throws IOException;
 
     }
 
@@ -86,7 +86,7 @@ public interface Writeable {
          *
          * @param in Input to read the value from
          */
-        V read(final StreamInput in) throws IOException;
+        V read(StreamInput in) throws IOException;
 
     }
 
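
The Writeable hunks drop final from parameters of abstract methods. With no method body the modifier protects nothing, and it does not carry over to implementations, so removing it changes no behaviour. A small self-contained sketch with a hypothetical Sink interface (not the real Writeable API):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class FinalParameterDemo {

    interface Sink {
        // Adding "final" to these parameters would change nothing: there is no body
        // to protect, and implementations declare their own parameters anyway.
        void write(OutputStream out, byte[] value) throws IOException;
    }

    public static void main(String[] args) throws IOException {
        Sink sink = (out, value) -> out.write(value); // implementation via lambda
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        sink.write(buffer, "demo".getBytes(StandardCharsets.UTF_8));
        System.out.println(new String(buffer.toByteArray(), StandardCharsets.UTF_8)); // prints: demo
    }
}
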
@@ -156,7 +156,7 @@ public class FiltersFunctionScoreQuery extends Query {
        final Weight[] filterWeights;
        final boolean needsScores;
 
-        public CustomBoostFactorWeight(Query parent, Weight subQueryWeight, Weight[] filterWeights, boolean needsScores) throws IOException {
+        CustomBoostFactorWeight(Query parent, Weight subQueryWeight, Weight[] filterWeights, boolean needsScores) throws IOException {
            super(parent);
            this.subQueryWeight = subQueryWeight;
            this.filterWeights = filterWeights;
@@ -109,7 +109,7 @@ public class FunctionScoreQuery extends Query {
        final Weight subQueryWeight;
        final boolean needsScores;
 
-        public CustomBoostFactorWeight(Query parent, Weight subQueryWeight, boolean needsScores) throws IOException {
+        CustomBoostFactorWeight(Query parent, Weight subQueryWeight, boolean needsScores) throws IOException {
            super(parent);
            this.subQueryWeight = subQueryWeight;
            this.needsScores = needsScores;
@@ -38,7 +38,7 @@ public class ScriptScoreFunction extends ScoreFunction {
        protected int docid;
        protected float score;
 
-        public CannedScorer() {
+        CannedScorer() {
            super(null);
        }
 
@@ -57,7 +57,7 @@ final class PerThreadIDAndVersionLookup {
    /**
     * Initialize lookup for the provided segment
     */
-    public PerThreadIDAndVersionLookup(LeafReader reader) throws IOException {
+    PerThreadIDAndVersionLookup(LeafReader reader) throws IOException {
        TermsEnum termsEnum = null;
        NumericDocValues versions = null;
 
@@ -170,7 +170,7 @@ public enum Recyclers {
            }
        }
 
-        final int slot() {
+        int slot() {
            final long id = Thread.currentThread().getId();
            // don't trust Thread.hashCode to have equiprobable low bits
            int slot = (int) BitMixer.mix64(id);
@@ -40,7 +40,7 @@ public enum DateTimeUnit {
    private final byte id;
    private final Function<DateTimeZone, DateTimeField> fieldFunction;
 
-    private DateTimeUnit(byte id, Function<DateTimeZone, DateTimeField> fieldFunction) {
+    DateTimeUnit(byte id, Function<DateTimeZone, DateTimeField> fieldFunction) {
        this.id = id;
        this.fieldFunction = fieldFunction;
    }
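
Enum constructors are implicitly private: the language already forbids instantiating an enum from outside, so spelling out private on the constructor is redundant, which is what the DateTimeUnit hunk cleans up. A standalone illustration with a hypothetical enum (not the real DateTimeUnit):

public enum EnumConstructorDemo {
    SECONDS(1),
    MINUTES(60);

    private final int seconds;

    // Already implicitly private; an explicit "private" here would be redundant.
    EnumConstructorDemo(int seconds) {
        this.seconds = seconds;
    }

    int toSeconds(int count) {
        return seconds * count;
    }

    public static void main(String[] args) {
        System.out.println(MINUTES.toSeconds(5)); // prints: 300
    }
}
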
@@ -563,7 +563,7 @@ public class Setting<T> extends ToXContentToBytes {
        private final Logger logger;
        private final Consumer<T> accept;
 
-        public Updater(Consumer<T> consumer, Logger logger, Consumer<T> accept) {
+        Updater(Consumer<T> consumer, Logger logger, Consumer<T> accept) {
            this.consumer = consumer;
            this.logger = logger;
            this.accept = accept;
@@ -36,7 +36,7 @@ final class BigByteArray extends AbstractBigArray implements ByteArray {
    private byte[][] pages;
 
    /** Constructor. */
-    public BigByteArray(long size, BigArrays bigArrays, boolean clearOnResize) {
+    BigByteArray(long size, BigArrays bigArrays, boolean clearOnResize) {
        super(BYTE_PAGE_SIZE, bigArrays, clearOnResize);
        this.size = size;
        pages = new byte[numPages(size)][];
@@ -35,7 +35,7 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
    private long[][] pages;
 
    /** Constructor. */
-    public BigDoubleArray(long size, BigArrays bigArrays, boolean clearOnResize) {
+    BigDoubleArray(long size, BigArrays bigArrays, boolean clearOnResize) {
        super(LONG_PAGE_SIZE, bigArrays, clearOnResize);
        this.size = size;
        pages = new long[numPages(size)][];
@@ -35,7 +35,7 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
    private int[][] pages;
 
    /** Constructor. */
-    public BigFloatArray(long size, BigArrays bigArrays, boolean clearOnResize) {
+    BigFloatArray(long size, BigArrays bigArrays, boolean clearOnResize) {
        super(INT_PAGE_SIZE, bigArrays, clearOnResize);
        this.size = size;
        pages = new int[numPages(size)][];
@@ -35,7 +35,7 @@ final class BigIntArray extends AbstractBigArray implements IntArray {
    private int[][] pages;
 
    /** Constructor. */
-    public BigIntArray(long size, BigArrays bigArrays, boolean clearOnResize) {
+    BigIntArray(long size, BigArrays bigArrays, boolean clearOnResize) {
        super(INT_PAGE_SIZE, bigArrays, clearOnResize);
        this.size = size;
        pages = new int[numPages(size)][];
@@ -35,7 +35,7 @@ final class BigLongArray extends AbstractBigArray implements LongArray {
    private long[][] pages;
 
    /** Constructor. */
-    public BigLongArray(long size, BigArrays bigArrays, boolean clearOnResize) {
+    BigLongArray(long size, BigArrays bigArrays, boolean clearOnResize) {
        super(LONG_PAGE_SIZE, bigArrays, clearOnResize);
        this.size = size;
        pages = new long[numPages(size)][];
@@ -35,7 +35,7 @@ final class BigObjectArray<T> extends AbstractBigArray implements ObjectArray<T>
    private Object[][] pages;
 
    /** Constructor. */
-    public BigObjectArray(long size, BigArrays bigArrays) {
+    BigObjectArray(long size, BigArrays bigArrays) {
        super(OBJECT_PAGE_SIZE, bigArrays, true);
        this.size = size;
        pages = new Object[numPages(size)][];
@@ -226,7 +226,7 @@ public class CollectionUtils {
        private final List<T> in;
        private final int distance;
 
-        public RotatedList(List<T> list, int distance) {
+        RotatedList(List<T> list, int distance) {
            if (distance < 0 || distance >= list.size()) {
                throw new IllegalArgumentException();
            }
@@ -161,7 +161,7 @@ public class LongObjectPagedHashMap<T> extends AbstractPagedHashMap implements I
            }
 
            @Override
-            public final void remove() {
+            public void remove() {
                throw new UnsupportedOperationException();
            }
 
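
Here final is removed from a method that nothing can override: checkstyle's RedundantModifier check flags, among other things, final on methods of final and anonymous classes, since neither can be subclassed. A minimal sketch with a hypothetical single-element iterable, not the actual LongObjectPagedHashMap code:

import java.util.Iterator;
import java.util.NoSuchElementException;

public class FinalMethodDemo {

    // Returns an Iterable backed by an anonymous Iterator; anonymous classes
    // cannot be subclassed, so "final" on their methods would be redundant.
    static Iterable<String> single(String value) {
        return () -> new Iterator<String>() {
            private boolean consumed;

            @Override
            public boolean hasNext() {
                return consumed == false;
            }

            @Override
            public String next() {
                if (consumed) {
                    throw new NoSuchElementException();
                }
                consumed = true;
                return value;
            }

            @Override
            public void remove() { // "final" here could never prevent an override
                throw new UnsupportedOperationException();
            }
        };
    }

    public static void main(String[] args) {
        for (String s : single("demo")) {
            System.out.println(s); // prints: demo
        }
    }
}
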
@@ -161,7 +161,7 @@ public class EsExecutors {
        final AtomicInteger threadNumber = new AtomicInteger(1);
        final String namePrefix;
 
-        public EsThreadFactory(String namePrefix) {
+        EsThreadFactory(String namePrefix) {
            this.namePrefix = namePrefix;
            SecurityManager s = System.getSecurityManager();
            group = (s != null) ? s.getThreadGroup() :
@@ -189,7 +189,7 @@ public class EsExecutors {
 
        ThreadPoolExecutor executor;
 
-        public ExecutorScalingQueue() {
+        ExecutorScalingQueue() {
        }
 
        @Override
@@ -250,14 +250,14 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
        final Priority priority;
        final long insertionOrder;
 
-        public PrioritizedFutureTask(Runnable runnable, Priority priority, T value, long insertionOrder) {
+        PrioritizedFutureTask(Runnable runnable, Priority priority, T value, long insertionOrder) {
            super(runnable, value);
            this.task = runnable;
            this.priority = priority;
            this.insertionOrder = insertionOrder;
        }
 
-        public PrioritizedFutureTask(PrioritizedCallable<T> callable, long insertionOrder) {
+        PrioritizedFutureTask(PrioritizedCallable<T> callable, long insertionOrder) {
            super(callable);
            this.task = callable;
            this.priority = callable.priority();
@@ -267,7 +267,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
         */
        private Value targetObject;
 
-        public Target(XContentParser parser) {
+        Target(XContentParser parser) {
            this.parser = parser;
        }
 
@@ -360,7 +360,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
        final ParseField field;
        final boolean required;
 
-        public ConstructorArgInfo(ParseField field, boolean required) {
+        ConstructorArgInfo(ParseField field, boolean required) {
            this.field = field;
            this.required = required;
        }
Some files were not shown because too many files have changed in this diff.