Enable Checkstyle RedundantModifier
This commit is contained in:
parent
a728c4bb5c
commit
0e7faf1005
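Checkstyle's `RedundantModifier` check reports modifiers that the Java language already implies, and this commit both enables the check (first hunk below) and removes every modifier it flags. As a quick reference, here is a minimal sketch with hypothetical names — not code from this repository — covering the patterns cleaned up throughout the diff:

```java
// Each commented modifier below is implied by Java and therefore redundant.
public interface Listener {
    public static final String NAME = "ping"; // interface fields are always public static final
    public void onFailure(Throwable t);       // interface methods are always public (and abstract)
    public static interface Inner { }         // member interfaces are always static
}

public final class Channel {
    public final String action() { return "a"; } // no subclass of a final class can override this
    private static final void log() { }          // private and static methods cannot be overridden
}
```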
@@ -58,6 +58,8 @@
   -->
   <module name="ModifierOrder" />

+  <module name="RedundantModifier" />
+
   <!-- We don't use Java's builtin serialization and we suppress all warning
   about it. The flip side of that coin is that we shouldn't _try_ to use
   it. We can't outright ban it with ForbiddenApis because it complain about
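The hunk above registers the new rule next to the existing `ModifierOrder` module. For orientation, such entries normally live under Checkstyle's `TreeWalker` module; a minimal sketch of the surrounding structure (parent modules assumed here, not copied from this repository's config):

```xml
<module name="Checker">
  <module name="TreeWalker">
    <module name="ModifierOrder" />
    <module name="RedundantModifier" />
  </module>
</module>
```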
@@ -66,7 +66,7 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
         }
     }

-    private static final Throwable deduplicateCause(Throwable cause, ShardSearchFailure[] shardFailures) {
+    private static Throwable deduplicateCause(Throwable cause, ShardSearchFailure[] shardFailures) {
         if (shardFailures == null) {
             throw new IllegalArgumentException("shardSearchFailures must not be null");
         }

@@ -97,7 +97,7 @@ final class Seccomp {
     // Linux implementation, based on seccomp(2) or prctl(2) with bpf filtering

     /** Access to non-standard Linux libc methods */
-    static interface LinuxLibrary extends Library {
+    interface LinuxLibrary extends Library {
         /**
          * maps to prctl(2)
          */

@@ -107,7 +107,7 @@ final class Seccomp {
         * this is the only way, DON'T use it on some other architecture unless you know wtf you are doing
         */
        NativeLong syscall(NativeLong number, Object... args);
-    };
+    }

    // null if unavailable or something goes wrong.
    private static final LinuxLibrary linux_libc;

@@ -415,7 +415,7 @@ final class Seccomp {
    // OS X implementation via sandbox(7)

    /** Access to non-standard OS X libc methods */
-    static interface MacLibrary extends Library {
+    interface MacLibrary extends Library {
        /**
         * maps to sandbox_init(3), since Leopard
         */

@@ -489,7 +489,7 @@ final class Seccomp {
    // Solaris implementation via priv_set(3C)

    /** Access to non-standard Solaris libc methods */
-    static interface SolarisLibrary extends Library {
+    interface SolarisLibrary extends Library {
        /**
         * see priv_set(3C), a convenience method for setppriv(2).
         */
@@ -278,7 +278,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
        return latch;
    }

-    private final void maybeRefresh() {
+    private void maybeRefresh() {
        // Short-circuit if not enabled
        if (enabled) {
            refresh();

@@ -155,7 +155,7 @@ public abstract class BytesReference implements Accountable, Comparable<BytesRef
    /**
     * Compares the two references using the given int function.
     */
-    private static final int compareIterators(final BytesReference a, final BytesReference b, final ToIntBiFunction<BytesRef, BytesRef> f) {
+    private static int compareIterators(final BytesReference a, final BytesReference b, final ToIntBiFunction<BytesRef, BytesRef> f) {
        try {
            // we use the iterators since it's a 0-copy comparison where possible!
            final long lengthToCompare = Math.min(a.length(), b.length());

@@ -201,7 +201,7 @@ public abstract class BytesReference implements Accountable, Comparable<BytesRef
            }
        }

-    private static final void advance(final BytesRef ref, final int length) {
+    private static void advance(final BytesRef ref, final int length) {
        assert ref.length >= length : " ref.length: " + ref.length + " length: " + length;
        assert ref.offset+length < ref.bytes.length || (ref.offset+length == ref.bytes.length && ref.length-length == 0)
            : "offset: " + ref.offset + " ref.bytes.length: " + ref.bytes.length + " length: " + length + " ref.length: " + ref.length;

@@ -132,7 +132,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
            }

            @Override
-            public final void remove() {
+            public void remove() {
                throw new UnsupportedOperationException();
            }
        };

@@ -162,7 +162,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
            }

            @Override
-            public final void remove() {
+            public void remove() {
                throw new UnsupportedOperationException();
            }
        };

@@ -136,7 +136,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
            }

            @Override
-            public final void remove() {
+            public void remove() {
                throw new UnsupportedOperationException();
            }
        };

@@ -164,7 +164,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
            }

            @Override
-            public final void remove() {
+            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
@@ -172,7 +172,7 @@ public class GeoHashUtils {
        return BitUtil.flipFlop(((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd)));
    }

-    private static final char encode(int x, int y) {
+    private static char encode(int x, int y) {
        return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
    }

@@ -98,27 +98,27 @@ public final class GeoPoint {
        return this.resetFromIndexHash(BitUtil.flipFlop((geohashLong >>> 4) << ((level * 5) + 2)));
    }

-    public final double lat() {
+    public double lat() {
        return this.lat;
    }

-    public final double getLat() {
+    public double getLat() {
        return this.lat;
    }

-    public final double lon() {
+    public double lon() {
        return this.lon;
    }

-    public final double getLon() {
+    public double getLon() {
        return this.lon;
    }

-    public final String geohash() {
+    public String geohash() {
        return stringEncode(lon, lat);
    }

-    public final String getGeohash() {
+    public String getGeohash() {
        return stringEncode(lon, lat);
    }

@@ -631,7 +631,7 @@ public class PolygonBuilder extends ShapeBuilder {
        return concat(component, direction ^ orientation, points, offset, edges, toffset, length);
    }

-    private static final int top(Coordinate[] points, int offset, int length) {
+    private static int top(Coordinate[] points, int offset, int length) {
        int top = 0; // we start at 1 here since top points to 0
        for (int i = 1; i < length; i++) {
            if (points[offset + i].y < points[offset + top].y) {

@@ -645,7 +645,7 @@ public class PolygonBuilder extends ShapeBuilder {
        return top;
    }

-    private static final double[] range(Coordinate[] points, int offset, int length) {
+    private static double[] range(Coordinate[] points, int offset, int length) {
        double minX = points[0].x;
        double maxX = points[0].x;
        double minY = points[0].y;

@@ -117,12 +117,12 @@ public final class AllTermQuery extends Query {
        return new Weight(this) {

            @Override
-            public final float getValueForNormalization() throws IOException {
+            public float getValueForNormalization() throws IOException {
                return stats.getValueForNormalization();
            }

            @Override
-            public final void normalize(float norm, float topLevelBoost) {
+            public void normalize(float norm, float topLevelBoost) {
                stats.normalize(norm, topLevelBoost);
            }

@@ -46,7 +46,7 @@ public final class AllTokenStream extends TokenFilter {
    }

    @Override
-    public final boolean incrementToken() throws IOException {
+    public boolean incrementToken() throws IOException {
        if (!input.incrementToken()) {
            return false;
        }
@@ -457,7 +457,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
        return changed;
    }

-    private static final boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
+    private static boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
        boolean changed = false;
        for (String entry : deletes) {
            Set<String> keysToRemove = new HashSet<>();

@@ -178,7 +178,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
    }

    @Override
-    protected final boolean isPrivateSetting(String key) {
+    protected boolean isPrivateSetting(String key) {
        switch (key) {
            case IndexMetaData.SETTING_CREATION_DATE:
            case IndexMetaData.SETTING_INDEX_UUID:

@@ -465,7 +465,7 @@ public class Setting<T> extends ToXContentToBytes {
        }

        @Override
-        public final void apply(T value, Settings current, Settings previous) {
+        public void apply(T value, Settings current, Settings previous) {
            logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current));
            consumer.accept(value);
        }

@@ -46,7 +46,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {
        return load(() -> StreamInput.wrap(source), (inStream, props) -> props.load(inStream));
    }

-    private final <T extends Closeable> Map<String, String> load(
+    private <T extends Closeable> Map<String, String> load(
        Supplier<T> supplier,
        IOExceptionThrowingBiConsumer<T, Properties> properties
    ) throws IOException {

@@ -184,7 +184,7 @@ public abstract class ExtensionPoint {
     * @param extension the extension to register
     * @throws IllegalArgumentException iff the class is already registered
     */
-    public final void registerExtension(Class<? extends T> extension) {
+    public void registerExtension(Class<? extends T> extension) {
        if (extensions.contains(extension)) {
            throw new IllegalArgumentException("Can't register the same [" + this.name + "] more than once for [" + extension.getName() + "]");
        }

@@ -192,7 +192,7 @@ public abstract class ExtensionPoint {
    }

    @Override
-    protected final void bindExtensions(Binder binder) {
+    protected void bindExtensions(Binder binder) {
        Multibinder<T> allocationMultibinder = Multibinder.newSetBinder(binder, extensionClass);
        for (Class<? extends T> clazz : extensions) {
            binder.bind(clazz).asEagerSingleton();

@@ -227,7 +227,7 @@ public abstract class ExtensionPoint {
     *
     * @throws IllegalArgumentException iff the key is already registered
     */
-    public final void registerExtension(K key, V value) {
+    public void registerExtension(K key, V value) {
        V old = map.put(key, value);
        if (old != null) {
            throw new IllegalArgumentException("Cannot register [" + this.name + "] with key [" + key + "] to [" + value + "], already registered to [" + old + "]");
@@ -55,7 +55,7 @@ public class MasterFaultDetection extends FaultDetection {

    public static final String MASTER_PING_ACTION_NAME = "internal:discovery/zen/fd/master_ping";

-    public static interface Listener {
+    public interface Listener {

        /** called when pinging the master failed, like a timeout, transport disconnects etc */
        void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason);

@@ -812,7 +812,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl
        }

        @Override
-        public final void close() {
+        public void close() {
            if (closed.compareAndSet(false, true) && locks != null) {
                for (Lock lock : locks) {
                    try {

@@ -105,7 +105,7 @@ public class LocalAllocateDangledIndices extends AbstractComponent {
        });
    }

-    public static interface Listener {
+    public interface Listener {
        void onResponse(AllocateDangledResponse response);

        void onFailure(Throwable e);

@@ -27,7 +27,7 @@ import java.util.Set;
 */
 public interface CharMatcher {

-    public static class ByUnicodeCategory implements CharMatcher {
+    class ByUnicodeCategory implements CharMatcher {

        public static CharMatcher of(byte unicodeCategory) {
            return new ByUnicodeCategory(unicodeCategory);

@@ -134,5 +134,5 @@ public interface CharMatcher {
    }

    /** Returns true if, and only if, the provided character matches this character class. */
-    public boolean isTokenChar(int c);
+    boolean isTokenChar(int c);
 }

@@ -25,6 +25,6 @@ public interface MultiTermAwareComponent {
    /** Returns an analysis component to handle analysis if multi-term queries.
     * The returned component must be a TokenizerFactory, TokenFilterFactory or CharFilterFactory.
     */
-    public Object getMultiTermComponent();
+    Object getMultiTermComponent();

 }
@@ -502,7 +502,7 @@ public enum FieldData {
        };
    }

-    private static interface ToStringValues {
+    private interface ToStringValues {

        void get(int docID, List<CharSequence> values);

@@ -47,10 +47,9 @@ public abstract class Ordinals implements Accountable {
        return ordinals(NO_VALUES);
    }

-    public static interface ValuesHolder {
-
-        public abstract BytesRef lookupOrd(long ord);
-
+    public interface ValuesHolder {
+
+        BytesRef lookupOrd(long ord);
    }

 }

@@ -96,7 +96,7 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
        /**
         * @return the number of bytes for the given term
         */
-        public long bytesPerValue(BytesRef term);
+        long bytesPerValue(BytesRef term);

        /**
         * Execute any pre-loading estimations for the terms. May also

@@ -107,7 +107,7 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
         * @param terms terms to be estimated
         * @return A TermsEnum for the given terms
         */
-        public TermsEnum beforeLoad(Terms terms) throws IOException;
+        TermsEnum beforeLoad(Terms terms) throws IOException;

        /**
         * Possibly adjust a circuit breaker after field data has been loaded,

@@ -116,6 +116,6 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
         * @param termsEnum terms that were loaded
         * @param actualUsed actual field data memory usage
         */
-        public void afterLoad(TermsEnum termsEnum, long actualUsed);
+        void afterLoad(TermsEnum termsEnum, long actualUsed);
    }
 }
@@ -66,7 +66,7 @@ final class DocumentParser {
        this.docMapper = docMapper;
    }

-    final ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
+    ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
        validateType(source);

        final Mapping mapping = docMapper.mapping();

@@ -139,7 +139,7 @@ public abstract class MappedFieldType extends FieldType {
    public abstract String typeName();

    /** Checks this type is the same type as other. Adds a conflict if they are different. */
-    private final void checkTypeName(MappedFieldType other) {
+    private void checkTypeName(MappedFieldType other) {
        if (typeName().equals(other.typeName()) == false) {
            throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
        } else if (getClass() != other.getClass()) {

@@ -31,7 +31,7 @@ import java.util.Map;
 */
 public abstract class MetadataFieldMapper extends FieldMapper {

-    public static interface TypeParser extends Mapper.TypeParser {
+    public interface TypeParser extends Mapper.TypeParser {

        @Override
        MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;

@@ -155,7 +155,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
        }

        /** Get an instance based on the number of bytes that has been used to encode values. */
-        public static final Encoding of(int numBytesPerValue) {
+        public static Encoding of(int numBytesPerValue) {
            final Encoding instance = INSTANCES[numBytesPerValue];
            if (instance == null) {
                throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");

@@ -170,7 +170,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
         * <li>1m: 8 bytes</li>
         * <li>1cm: 8 bytes</li>
         * <li>1mm: 10 bytes</li></ul> */
-        public static final Encoding of(DistanceUnit.Distance precision) {
+        public static Encoding of(DistanceUnit.Distance precision) {
            for (Encoding encoding : INSTANCES) {
                if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
                    return encoding;

@@ -206,7 +206,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
        }

        /** The number of bytes required to encode a single geo point. */
-        public final int numBytes() {
+        public int numBytes() {
            return numBytes;
        }

@@ -52,19 +52,19 @@ final class LegacyIpIndexFieldData implements IndexFieldData<AtomicFieldData> {
        this.logger = Loggers.getLogger(getClass());
    }

-    public final String getFieldName() {
+    public String getFieldName() {
        return fieldName;
    }

-    public final void clear() {
+    public void clear() {
        // nothing to do
    }

-    public final void clear(IndexReader reader) {
+    public void clear(IndexReader reader) {
        // nothing to do
    }

-    public final Index index() {
+    public Index index() {
        return index;
    }

@@ -450,7 +450,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
        }

        @Override
-        public final String toString() {
+        public String toString() {
            try {
                XContentBuilder builder = XContentFactory.jsonBuilder();
                builder.prettyPrint();
@@ -1538,7 +1538,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
        return mapperService.documentMapperWithAutoCreate(type);
    }

-    private final EngineConfig newEngineConfig(EngineConfig.OpenMode openMode) {
+    private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode) {
        final IndexShardRecoveryPerformer translogRecoveryPerformer = new IndexShardRecoveryPerformer(shardId, mapperService, logger);
        return new EngineConfig(openMode, shardId,
            threadPool, indexSettings, warmer, store, deletionPolicy, indexSettings.getMergePolicy(),

@@ -130,7 +130,7 @@ final class StoreRecovery {
        return false;
    }

-    final void addIndices(RecoveryState.Index indexRecoveryStats, Directory target, Directory... sources) throws IOException {
+    void addIndices(RecoveryState.Index indexRecoveryStats, Directory target, Directory... sources) throws IOException {
        target = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target);
        try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(target, indexRecoveryStats),
            new IndexWriterConfig(null)

@@ -30,7 +30,7 @@ public final class DirectoryUtils {

    private DirectoryUtils() {} // no instance

-    static final <T extends Directory> Directory getLeafDirectory(FilterDirectory dir, Class<T> targetClass) {
+    static <T extends Directory> Directory getLeafDirectory(FilterDirectory dir, Class<T> targetClass) {
        Directory current = dir.getDelegate();
        while (true) {
            if ((current instanceof FilterDirectory)) {

@@ -556,7 +556,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
        failIfCorrupted(directory, shardId);
    }

-    private static final void failIfCorrupted(Directory directory, ShardId shardId) throws IOException {
+    private static void failIfCorrupted(Directory directory, ShardId shardId) throws IOException {
        final String[] files = directory.listAll();
        List<CorruptIndexException> ex = new ArrayList<>();
        for (String file : files) {

@@ -1033,7 +1033,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
        return null;
    }

-    private final int numSegmentFiles() { // only for asserts
+    private int numSegmentFiles() { // only for asserts
        int count = 0;
        for (StoreFileMetaData file : this) {
            if (file.name().startsWith(IndexFileNames.SEGMENTS)) {
@@ -1353,8 +1353,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
    /**
     * A listener that is executed once the store is closed and all references to it are released
     */
-    public static interface OnClose extends Callback<ShardLock> {
-        static final OnClose EMPTY = new OnClose() {
+    public interface OnClose extends Callback<ShardLock> {
+        OnClose EMPTY = new OnClose() {
            /**
             * This method is called while the provided {@link org.elasticsearch.env.ShardLock} is held.
             * This method is only called once after all resources for a store are released.
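In the `OnClose` hunk above, `EMPTY` loses `static final` but keeps identical semantics, because fields declared in an interface are implicitly `public static final`. A small self-contained illustration with hypothetical names:

```java
interface Greeter {
    Greeter EMPTY = message -> { };   // implicitly public static final
    void greet(String message);
}

class GreeterDemo {
    public static void main(String[] args) {
        Greeter.EMPTY.greet("hello"); // usable like any other constant
    }
}
```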
@@ -215,7 +215,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
    }

    /** recover all translog files found on disk */
-    private final ArrayList<TranslogReader> recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException {
+    private ArrayList<TranslogReader> recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException {
        boolean success = false;
        ArrayList<TranslogReader> foundTranslogs = new ArrayList<>();
        final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work

@@ -328,11 +328,11 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
    /**
     * Returns the current size of the cache
     */
-    final int count() {
+    int count() {
        return cache.count();
    }

-    final int numRegisteredCloseListeners() { // for testing
+    int numRegisteredCloseListeners() { // for testing
        return registeredClosedListeners.size();
    }
 }

@@ -486,12 +486,12 @@ public class RecoverySourceHandler {
        }

        @Override
-        public final void write(int b) throws IOException {
+        public void write(int b) throws IOException {
            throw new UnsupportedOperationException("we can't send single bytes over the wire");
        }

        @Override
-        public final void write(byte[] b, int offset, int length) throws IOException {
+        public void write(byte[] b, int offset, int length) throws IOException {
            sendNextChunk(position, new BytesArray(b, offset, length), md.length() == position + length);
            position += length;
            assert md.length() >= position : "length: " + md.length() + " but positions was: " + position;
@@ -84,21 +84,21 @@ public interface ScriptContext {
        this.key = pluginName + "_" + operation;
    }

-    public final String getPluginName() {
+    public String getPluginName() {
        return pluginName;
    }

-    public final String getOperation() {
+    public String getOperation() {
        return operation;
    }

    @Override
-    public final String getKey() {
+    public String getKey() {
        return key;
    }

    @Override
-    public final String toString() {
+    public String toString() {
        return getKey();
    }
 }

@@ -73,7 +73,7 @@ public interface DocValueFormat extends NamedWriteable {
     * to the original BytesRef. */
    BytesRef parseBytesRef(String value);

-    public static final DocValueFormat RAW = new DocValueFormat() {
+    DocValueFormat RAW = new DocValueFormat() {

        @Override
        public String getWriteableName() {

@@ -121,7 +121,7 @@ public interface DocValueFormat extends NamedWriteable {
        }
    };

-    public static final class DateTime implements DocValueFormat {
+    final class DateTime implements DocValueFormat {

        public static final String NAME = "date_time";

@@ -181,7 +181,7 @@ public interface DocValueFormat extends NamedWriteable {
        }
    }

-    public static final DocValueFormat GEOHASH = new DocValueFormat() {
+    DocValueFormat GEOHASH = new DocValueFormat() {

        @Override
        public String getWriteableName() {

@@ -223,7 +223,7 @@ public interface DocValueFormat extends NamedWriteable {
        }
    };

-    public static final DocValueFormat BOOLEAN = new DocValueFormat() {
+    DocValueFormat BOOLEAN = new DocValueFormat() {

        @Override
        public String getWriteableName() {

@@ -271,7 +271,7 @@ public interface DocValueFormat extends NamedWriteable {
        }
    };

-    public static final DocValueFormat IP = new DocValueFormat() {
+    DocValueFormat IP = new DocValueFormat() {

        @Override
        public String getWriteableName() {

@@ -315,7 +315,7 @@ public interface DocValueFormat extends NamedWriteable {
        }
    };

-    public static final class Decimal implements DocValueFormat {
+    final class Decimal implements DocValueFormat {

        public static final String NAME = "decimal";
        private static final DecimalFormatSymbols SYMBOLS = new DecimalFormatSymbols(Locale.ROOT);
@@ -146,7 +146,7 @@ public interface SearchHit extends Streamable, ToXContent, Iterable<SearchHitFie
    /**
     * The hit field matching the given field name.
     */
-    public SearchHitField field(String fieldName);
+    SearchHitField field(String fieldName);

    /**
     * A map of hit fields (from field name to hit fields) if additional fields

@@ -208,23 +208,23 @@ public interface SearchHit extends Streamable, ToXContent, Iterable<SearchHitFie
    /**
     * Encapsulates the nested identity of a hit.
     */
-    public interface NestedIdentity {
+    interface NestedIdentity {

        /**
         * Returns the nested field in the source this hit originates from
         */
-        public Text getField();
+        Text getField();

        /**
         * Returns the offset in the nested array of objects in the source this hit
         */
-        public int getOffset();
+        int getOffset();

        /**
         * Returns the next child nested level if there is any, otherwise <code>null</code> is returned.
         *
         * In the case of mappings with multiple levels of nested object fields
         */
-        public NestedIdentity getChild();
+        NestedIdentity getChild();
    }
 }

@@ -62,5 +62,5 @@ public interface SearchHits extends Streamable, ToXContent, Iterable<SearchHit>
    /**
     * The hits of the search request (based on the search type, and from / size provided).
     */
-    public SearchHit[] getHits();
+    SearchHit[] getHits();
 }

@@ -38,7 +38,7 @@ public class AggregationStreams {
    /**
     * A stream that knows how to read an aggregation from the input.
     */
-    public static interface Stream {
+    public interface Stream {
        InternalAggregation readResult(StreamInput in) throws IOException;
    }

@@ -35,7 +35,7 @@ public class BucketStreams {
    /**
     * A stream that knows how to read a bucket from the input.
     */
-    public static interface Stream<B extends MultiBucketsAggregation.Bucket> {
+    public interface Stream<B extends MultiBucketsAggregation.Bucket> {
        B readResult(StreamInput in, BucketStreamContext context) throws IOException;
        BucketStreamContext getBucketStreamContext(B bucket);
    }
@@ -32,7 +32,7 @@ public interface Filters extends MultiBucketsAggregation {
    /**
     * A bucket associated with a specific filter (identified by its key)
     */
-    public static interface Bucket extends MultiBucketsAggregation.Bucket {
+    interface Bucket extends MultiBucketsAggregation.Bucket {
    }

    /**

@@ -32,7 +32,7 @@ public interface Histogram extends MultiBucketsAggregation {
    /**
     * A bucket in the histogram where documents fall in
     */
-    static interface Bucket extends MultiBucketsAggregation.Bucket {
+    interface Bucket extends MultiBucketsAggregation.Bucket {

    }

@@ -46,7 +46,7 @@ public interface Histogram extends MultiBucketsAggregation {
    /**
     * A strategy defining the order in which the buckets in this histogram are ordered.
     */
-    abstract static class Order implements ToXContent {
+    abstract class Order implements ToXContent {

        public static final Order KEY_ASC = new InternalOrder((byte) 1, "_key", true, new Comparator<InternalHistogram.Bucket>() {
            @Override

@@ -31,7 +31,7 @@ public interface Range extends MultiBucketsAggregation {
    /**
     * A bucket associated with a specific range
     */
-    public static interface Bucket extends MultiBucketsAggregation.Bucket {
+    interface Bucket extends MultiBucketsAggregation.Bucket {

        /**
         * @return The lower bound of the range

@@ -305,7 +305,7 @@ public class RangeAggregator extends BucketsAggregator {
        };
    }

-    private final long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) {
+    private long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) {
        return owningBucketOrdinal * ranges.length + rangeOrd;
    }

@@ -337,7 +337,7 @@ public class RangeAggregator extends BucketsAggregator {
        return rangeFactory.create(name, buckets, format, keyed, pipelineAggregators(), metaData());
    }

-    private static final void sortRanges(final Range[] ranges) {
+    private static void sortRanges(final Range[] ranges) {
        new InPlaceMergeSorter() {

            @Override
@@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.Aggregation;

 public interface NumericMetricsAggregation extends Aggregation {

-    public static interface SingleValue extends NumericMetricsAggregation {
+    interface SingleValue extends NumericMetricsAggregation {

        double value();

@@ -31,6 +31,6 @@ public interface NumericMetricsAggregation extends Aggregation {

    }

-    public static interface MultiValue extends NumericMetricsAggregation {
+    interface MultiValue extends NumericMetricsAggregation {
    }
 }

@@ -29,6 +29,6 @@ public interface Cardinality extends NumericMetricsAggregation.SingleValue {
    /**
     * The number of unique terms.
     */
-    public long getValue();
+    long getValue();

 }

@@ -26,7 +26,7 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
 */
 public interface PercentileRanks extends NumericMetricsAggregation.MultiValue, Iterable<Percentile> {

-    public static final String TYPE_NAME = "percentile_ranks";
+    String TYPE_NAME = "percentile_ranks";

    /**
     * Return the percentile for the given value.

@@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
 */
 public interface Percentiles extends NumericMetricsAggregation.MultiValue, Iterable<Percentile> {

-    public static final String TYPE_NAME = "percentiles";
+    String TYPE_NAME = "percentiles";

    /**
     * Return the value associated with the provided percentile.

@@ -41,10 +41,10 @@ public abstract class PipelineAggregator implements Streamable, NamedWriteable {
     * Parse the {@link PipelineAggregationBuilder} from a {@link QueryParseContext}.
     */
    @FunctionalInterface
-    public static interface Parser {
-        public static final ParseField BUCKETS_PATH = new ParseField("buckets_path");
-        public static final ParseField FORMAT = new ParseField("format");
-        public static final ParseField GAP_POLICY = new ParseField("gap_policy");
+    public interface Parser {
+        ParseField BUCKETS_PATH = new ParseField("buckets_path");
+        ParseField FORMAT = new ParseField("format");
+        ParseField GAP_POLICY = new ParseField("gap_policy");

        /**
         * Returns the pipeline aggregator factory with which this parser is
@@ -41,70 +41,70 @@ public final class PipelineAggregatorBuilders {
    private PipelineAggregatorBuilders() {
    }

-    public static final DerivativePipelineAggregationBuilder derivative(String name, String bucketsPath) {
+    public static DerivativePipelineAggregationBuilder derivative(String name, String bucketsPath) {
        return new DerivativePipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final MaxBucketPipelineAggregationBuilder maxBucket(String name, String bucketsPath) {
+    public static MaxBucketPipelineAggregationBuilder maxBucket(String name, String bucketsPath) {
        return new MaxBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final MinBucketPipelineAggregationBuilder minBucket(String name, String bucketsPath) {
+    public static MinBucketPipelineAggregationBuilder minBucket(String name, String bucketsPath) {
        return new MinBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final AvgBucketPipelineAggregationBuilder avgBucket(String name, String bucketsPath) {
+    public static AvgBucketPipelineAggregationBuilder avgBucket(String name, String bucketsPath) {
        return new AvgBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final SumBucketPipelineAggregationBuilder sumBucket(String name, String bucketsPath) {
+    public static SumBucketPipelineAggregationBuilder sumBucket(String name, String bucketsPath) {
        return new SumBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final StatsBucketPipelineAggregationBuilder statsBucket(String name, String bucketsPath) {
+    public static StatsBucketPipelineAggregationBuilder statsBucket(String name, String bucketsPath) {
        return new StatsBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final ExtendedStatsBucketPipelineAggregationBuilder extendedStatsBucket(String name,
+    public static ExtendedStatsBucketPipelineAggregationBuilder extendedStatsBucket(String name,
            String bucketsPath) {
        return new ExtendedStatsBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final PercentilesBucketPipelineAggregationBuilder percentilesBucket(String name,
+    public static PercentilesBucketPipelineAggregationBuilder percentilesBucket(String name,
            String bucketsPath) {
        return new PercentilesBucketPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final MovAvgPipelineAggregationBuilder movingAvg(String name, String bucketsPath) {
+    public static MovAvgPipelineAggregationBuilder movingAvg(String name, String bucketsPath) {
        return new MovAvgPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final BucketScriptPipelineAggregationBuilder bucketScript(String name,
+    public static BucketScriptPipelineAggregationBuilder bucketScript(String name,
            Map<String, String> bucketsPathsMap, Script script) {
        return new BucketScriptPipelineAggregationBuilder(name, bucketsPathsMap, script);
    }

-    public static final BucketScriptPipelineAggregationBuilder bucketScript(String name, Script script,
+    public static BucketScriptPipelineAggregationBuilder bucketScript(String name, Script script,
            String... bucketsPaths) {
        return new BucketScriptPipelineAggregationBuilder(name, script, bucketsPaths);
    }

-    public static final BucketSelectorPipelineAggregationBuilder bucketSelector(String name,
+    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name,
            Map<String, String> bucketsPathsMap, Script script) {
        return new BucketSelectorPipelineAggregationBuilder(name, bucketsPathsMap, script);
    }

-    public static final BucketSelectorPipelineAggregationBuilder bucketSelector(String name, Script script,
+    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name, Script script,
            String... bucketsPaths) {
        return new BucketSelectorPipelineAggregationBuilder(name, script, bucketsPaths);
    }

-    public static final CumulativeSumPipelineAggregationBuilder cumulativeSum(String name,
+    public static CumulativeSumPipelineAggregationBuilder cumulativeSum(String name,
            String bucketsPath) {
        return new CumulativeSumPipelineAggregationBuilder(name, bucketsPath);
    }

-    public static final SerialDiffPipelineAggregationBuilder diff(String name, String bucketsPath) {
+    public static SerialDiffPipelineAggregationBuilder diff(String name, String bucketsPath) {
        return new SerialDiffPipelineAggregationBuilder(name, bucketsPath);
    }
 }
@@ -38,7 +38,7 @@ public class PipelineAggregatorStreams {
    /**
     * A stream that knows how to read an aggregation from the input.
     */
-    public static interface Stream {
+    public interface Stream {
        PipelineAggregator readResult(StreamInput in) throws IOException;
    }

@@ -27,5 +27,5 @@ import org.elasticsearch.common.xcontent.ToXContent;
 */
 public interface MovAvgModelBuilder extends ToXContent {

-    public MovAvgModel build();
+    MovAvgModel build();
 }

@@ -37,7 +37,7 @@ public interface Rescorer {
    /**
     * Returns the name of this rescorer
     */
-    public String name();
+    String name();

    /**
     * Modifies the result of the previously executed search ({@link TopDocs})

@@ -48,7 +48,7 @@ public interface Rescorer {
     * @param rescoreContext the {@link RescoreSearchContext}. This will never be <code>null</code>
     * @throws IOException if an {@link IOException} occurs during rescoring
     */
-    public TopDocs rescore(TopDocs topDocs, SearchContext context, RescoreSearchContext rescoreContext) throws IOException;
+    TopDocs rescore(TopDocs topDocs, SearchContext context, RescoreSearchContext rescoreContext) throws IOException;

    /**
     * Executes an {@link Explanation} phase on the rescorer.

@@ -60,8 +60,8 @@ public interface Rescorer {
     * @return the explain for the given top level document ID.
     * @throws IOException if an {@link IOException} occurs
     */
-    public Explanation explain(int topLevelDocId, SearchContext context, RescoreSearchContext rescoreContext,
-            Explanation sourceExplanation) throws IOException;
+    Explanation explain(int topLevelDocId, SearchContext context, RescoreSearchContext rescoreContext,
+            Explanation sourceExplanation) throws IOException;

    /**
     * Extracts all terms needed to execute this {@link Rescorer}. This method

@@ -69,7 +69,7 @@ public interface Rescorer {
     * {@link SearchType#DFS_QUERY_AND_FETCH} and
     * {@link SearchType#DFS_QUERY_THEN_FETCH}
     */
-    public void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set<Term> termsSet);
+    void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set<Term> termsSet);

    /*
     * TODO: At this point we only have one implementation which modifies the
@@ -23,11 +23,8 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
-import org.apache.lucene.util.AttributeImpl;
-import org.apache.lucene.util.AttributeReflector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.fst.Util;

@@ -108,8 +105,8 @@ public final class CompletionTokenStream extends TokenStream {
        input.close();
    }

-    public static interface ToFiniteStrings {
-        public Set<IntsRef> toFiniteStrings(TokenStream stream) throws IOException;
+    public interface ToFiniteStrings {
+        Set<IntsRef> toFiniteStrings(TokenStream stream) throws IOException;
    }

    @Override

@@ -129,48 +126,8 @@ public final class CompletionTokenStream extends TokenStream {
        /**
         * Return the builder from which the term is derived.
         */
-        public BytesRefBuilder builder();
+        BytesRefBuilder builder();

-        public CharSequence toUTF16();
-    }
-
-    public static final class ByteTermAttributeImpl extends AttributeImpl implements ByteTermAttribute, TermToBytesRefAttribute {
-        private final BytesRefBuilder bytes = new BytesRefBuilder();
-        private CharsRefBuilder charsRef;
-
-        @Override
-        public BytesRefBuilder builder() {
-            return bytes;
-        }
-
-        @Override
-        public BytesRef getBytesRef() {
-            return bytes.get();
-        }
-
-        @Override
-        public void clear() {
-            bytes.clear();
-        }
-
-        @Override
-        public void reflectWith(AttributeReflector reflector) {
-
-        }
-
-        @Override
-        public void copyTo(AttributeImpl target) {
-            ByteTermAttributeImpl other = (ByteTermAttributeImpl) target;
-            other.bytes.copyBytes(bytes);
-        }
-
-        @Override
-        public CharSequence toUTF16() {
-            if (charsRef == null) {
-                charsRef = new CharsRefBuilder();
-            }
-            charsRef.copyUTF8Bytes(getBytesRef());
-            return charsRef.get();
-        }
+        CharSequence toUTF16();
    }
 }
@@ -323,7 +323,7 @@ public class GeolocationContextMapping extends ContextMapping {
        return new GeoQuery(name, geohash, precisions);
    }

-    private static final int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException {
+    private static int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException {
        switch (parser.currentToken()) {
            case VALUE_STRING:
                return GeoUtils.geoHashLevelsForPrecision(parser.text());

@@ -428,7 +428,7 @@ public final class DirectCandidateGeneratorBuilder implements CandidateGenerator
    }

    @Override
-    public final String toString() {
+    public String toString() {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.prettyPrint();

@@ -440,14 +440,14 @@ public final class DirectCandidateGeneratorBuilder implements CandidateGenerator
    }

    @Override
-    public final int hashCode() {
+    public int hashCode() {
        return Objects.hash(field, preFilter, postFilter, suggestMode, accuracy,
                size, sort, stringDistance, maxEdits, maxInspections,
                maxTermFreq, prefixLength, minWordLength, minDocFreq);
    }

    @Override
-    public final boolean equals(Object obj) {
+    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

@@ -99,7 +99,7 @@ public final class Laplace extends SmoothingModel {
    }

    @Override
-    protected final int doHashCode() {
+    protected int doHashCode() {
        return Objects.hash(alpha);
    }

@@ -127,7 +127,7 @@ public final class LinearInterpolation extends SmoothingModel {
    }

    @Override
-    protected final int doHashCode() {
+    protected int doHashCode() {
        return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
    }

@@ -54,16 +54,10 @@ public final class MultiCandidateGeneratorWrapper extends CandidateGenerator {
        return reduce(set, numCandidates);
    }

-    private final CandidateSet reduce(CandidateSet set, int numCandidates) {
+    private CandidateSet reduce(CandidateSet set, int numCandidates) {
        if (set.candidates.length > numCandidates) {
            Candidate[] candidates = set.candidates;
-            Arrays.sort(candidates, new Comparator<Candidate>() {
-
-                @Override
-                public int compare(Candidate left, Candidate right) {
-                    return Double.compare(right.score, left.score);
-                }
-            });
+            Arrays.sort(candidates, (left, right) -> Double.compare(right.score, left.score));
            Candidate[] newSet = new Candidate[numCandidates];
            System.arraycopy(candidates, 0, newSet, 0, numCandidates);
            set.candidates = newSet;

@@ -102,7 +102,7 @@ public final class StupidBackoff extends SmoothingModel {
    }

    @Override
-    protected final int doHashCode() {
+    protected int doHashCode() {
        return Objects.hash(discount);
    }

@@ -50,22 +50,22 @@ public final class TcpTransportChannel<Channel> implements TransportChannel {
    }

    @Override
-    public final String getProfileName() {
+    public String getProfileName() {
        return profileName;
    }

    @Override
-    public final String action() {
+    public String action() {
        return this.action;
    }

    @Override
-    public final void sendResponse(TransportResponse response) throws IOException {
+    public void sendResponse(TransportResponse response) throws IOException {
        sendResponse(response, TransportResponseOptions.EMPTY);
    }

    @Override
-    public final void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
+    public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
        release();
        transport.sendResponse(version, channel, response, requestId, action, options);

@@ -86,12 +86,12 @@ public final class TcpTransportChannel<Channel> implements TransportChannel {
    }

    @Override
-    public final long getRequestId() {
+    public long getRequestId() {
        return requestId;
    }

    @Override
-    public final String getChannelType() {
+    public String getChannelType() {
        return channelType;
    }

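The two `TcpTransportChannel` hunks above drop `final` from instance methods because the class itself is declared `public final class`: with no possible subclass, a per-method `final` cannot forbid anything. A minimal sketch (hypothetical class):

```java
public final class Box {
    // Marking this method 'final' would be redundant:
    // the enclosing class is final, so it can never be overridden.
    public int size() {
        return 42;
    }
}
```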
@@ -76,13 +76,13 @@ public class SloppyMathTests extends ESTestCase {
        assertThat("distance between("+lat1+", "+lon1+") and ("+lat2+", "+lon2+"))", sloppy, closeTo(accurate, maxError(accurate)));
    }

-    private static final double randomLatitude() {
+    private static double randomLatitude() {
        // crop pole areas, sine we now there the function
        // is not accurate around lat(89°, 90°) and lat(-90°, -89°)
        return (random().nextDouble() - 0.5) * 178.0;
    }

-    private static final double randomLongitude() {
+    private static double randomLongitude() {
        return (random().nextDouble() - 0.5) * 360.0;
    }
 }

@@ -447,12 +447,12 @@ public class TransportActionFilterChainTests extends ESTestCase {
        }
    }

-    private static interface RequestCallback {
+    private interface RequestCallback {
        <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
                ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain);
    }

-    private static interface ResponseCallback {
+    private interface ResponseCallback {
        <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
                ActionFilterChain<?, Response> chain);
    }

@@ -407,7 +407,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase {
        return clusterState;
    }

-    private final void assertRecoveryNodeVersions(RoutingNodes routingNodes) {
+    private void assertRecoveryNodeVersions(RoutingNodes routingNodes) {
        logger.trace("RoutingNodes: {}", routingNodes.prettyPrint());

        List<ShardRouting> mutableShardRoutings = routingNodes.shardsWithState(ShardRoutingState.RELOCATING);

@@ -104,7 +104,7 @@ public class NettyHttpClient implements Closeable {
    }

    @SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods.
-    private final Collection<HttpResponse> processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple<String,
+    final Collection<HttpResponse> processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple<String,
            CharSequence>... urisAndBodies) throws InterruptedException {
        Collection<HttpRequest> requests = new ArrayList<>(urisAndBodies.length);
        for (Tuple<String, CharSequence> uriAndBody : urisAndBodies) {
@@ -646,12 +646,12 @@ public class CorruptedFileIT extends ESIntegTestCase {
        return shardRouting;
    }

-    private static final boolean isPerCommitFile(String fileName) {
+    private static boolean isPerCommitFile(String fileName) {
        // .liv and segments_N are per commit files and might change after corruption
        return fileName.startsWith("segments") || fileName.endsWith(".liv");
    }

-    private static final boolean isPerSegmentFile(String fileName) {
+    private static boolean isPerSegmentFile(String fileName) {
        return isPerCommitFile(fileName) == false;
    }

@@ -142,7 +142,7 @@ public class RestFilterChainTests extends ESTestCase {
        assertThat(fakeRestChannel.errors().get(), equalTo(additionalContinueCount));
    }

-    private static enum Operation implements Callback {
+    private enum Operation implements Callback {
        CONTINUE_PROCESSING {
            @Override
            public void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception {

@@ -157,7 +157,7 @@ public class RestFilterChainTests extends ESTestCase {
        }
    }

-    private static interface Callback {
+    private interface Callback {
        void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception;
    }

@@ -623,7 +623,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
        assertFirstHit(searchResponse, hasId("ultimate1"));
    }

-    private static final void assertEquivalent(String query, SearchResponse left, SearchResponse right) {
+    private static void assertEquivalent(String query, SearchResponse left, SearchResponse right) {
        assertNoFailures(left);
        assertNoFailures(right);
        SearchHits leftHits = left.getHits();

@@ -65,7 +65,7 @@ public class SearchAfterBuilderTests extends ESTestCase {
        indicesQueriesRegistry = null;
    }

-    private final SearchAfterBuilder randomSearchFromBuilder() throws IOException {
+    private SearchAfterBuilder randomSearchFromBuilder() throws IOException {
        int numSearchFrom = randomIntBetween(1, 10);
        SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder();
        Object[] values = new Object[numSearchFrom];

@@ -112,7 +112,7 @@ public class SearchAfterBuilderTests extends ESTestCase {
    // ensure that every number type remain the same before/after xcontent (de)serialization.
    // This is not a problem because the final type of each field value is extracted from associated sort field.
    // This little trick ensure that equals and hashcode are the same when using the xcontent serialization.
-    private final SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
+    private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
        int numSearchAfter = randomIntBetween(1, 10);
        XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
        jsonBuilder.startObject();
@@ -88,7 +88,7 @@ public class SliceBuilderTests extends ESTestCase {
        indicesQueriesRegistry = null;
    }

-    private final SliceBuilder randomSliceBuilder() throws IOException {
+    private SliceBuilder randomSliceBuilder() throws IOException {
        int max = randomIntBetween(2, MAX_SLICE);
        int id = randomInt(max - 1);
        String field = randomAsciiOfLengthBetween(5, 20);

@@ -165,8 +165,8 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
        return factory;
    }

-    private final void parseMissingAndAdd(final String aggregationName, final String currentFieldName,
-            XContentParser parser, final Map<String, Object> missing) throws IOException {
+    private void parseMissingAndAdd(final String aggregationName, final String currentFieldName,
+            XContentParser parser, final Map<String, Object> missing) throws IOException {
        XContentParser.Token token = parser.currentToken();
        if (token == null) {
            token = parser.nextToken();

@@ -93,7 +93,7 @@ public final class ConvertProcessor extends AbstractProcessor {
    };

    @Override
-    public final String toString() {
+    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }

@@ -60,7 +60,7 @@ public final class CompilerSettings {
     * in a script before an exception is thrown. This attempts to prevent infinite loops. Note if
     * the counter is set to 0, no loop counter will be written.
     */
-    public final int getMaxLoopCounter() {
+    public int getMaxLoopCounter() {
        return maxLoopCounter;
    }

@@ -68,7 +68,7 @@ public final class CompilerSettings {
     * Set the cumulative total number of statements that can be made in all loops.
     * @see #getMaxLoopCounter
     */
-    public final void setMaxLoopCounter(int max) {
+    public void setMaxLoopCounter(int max) {
        this.maxLoopCounter = max;
    }

@@ -646,7 +646,7 @@ public final class Definition {
        }
    }

-    private final void addStruct(final String name, final Class<?> clazz) {
+    private void addStruct(final String name, final Class<?> clazz) {
        if (!name.matches("^[_a-zA-Z][\\.,_a-zA-Z0-9]*$")) {
            throw new IllegalArgumentException("Invalid struct name [" + name + "].");
        }
@@ -661,7 +661,7 @@ public final class Definition {
        simpleTypesMap.put(name, getTypeInternal(name));
    }

-    private final void addConstructorInternal(final String struct, final String name, final Type[] args) {
+    private void addConstructorInternal(final String struct, final String name, final Type[] args) {
        final Struct owner = structsMap.get(struct);

        if (owner == null) {

@@ -734,7 +734,7 @@ public final class Definition {
     * </ul>
     * no spaces allowed.
     */
-    private final void addSignature(String className, String signature) {
+    private void addSignature(String className, String signature) {
        String elements[] = signature.split("\u0020");
        if (elements.length != 2) {
            throw new IllegalArgumentException("Malformed signature: " + signature);

@@ -774,8 +774,8 @@ public final class Definition {
        }
    }

-    private final void addMethodInternal(String struct, String name, boolean augmentation,
-            Type rtn, Type[] args) {
+    private void addMethodInternal(String struct, String name, boolean augmentation,
+            Type rtn, Type[] args) {
        final Struct owner = structsMap.get(struct);

        if (owner == null) {

@@ -858,7 +858,7 @@ public final class Definition {
        }
    }

-    private final void addFieldInternal(String struct, String name, Type type) {
+    private void addFieldInternal(String struct, String name, Type type) {
        final Struct owner = structsMap.get(struct);

        if (owner == null) {

@@ -261,7 +261,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri
    }

    /** Evaluates with reduced privileges */
-    private final PyObject evalRestricted(final PyCode code) {
+    private PyObject evalRestricted(final PyCode code) {
        // eval the script with reduced privileges
        return AccessController.doPrivileged(new PrivilegedAction<PyObject>() {
            @Override

@@ -62,18 +62,18 @@ public class ThrowingLeafReaderWrapper extends FilterLeafReader {
     * A callback interface that allows to throw certain exceptions for
     * methods called on the IndexReader that is wrapped by {@link ThrowingLeafReaderWrapper}
     */
-    public static interface Thrower {
+    public interface Thrower {
        /**
         * Maybe throws an exception ;)
         */
-        public void maybeThrow(Flags flag) throws IOException;
+        void maybeThrow(Flags flag) throws IOException;

        /**
         * If this method returns true the {@link Terms} instance for the given field
         * is wrapped with Thrower support otherwise no exception will be thrown for
         * the current {@link Terms} instance or any other instance obtained from it.
         */
-        public boolean wrapTerms(String field);
+        boolean wrapTerms(String field);
    }

    public ThrowingLeafReaderWrapper(LeafReader in, Thrower thrower) {