Fix line length for org.elasticsearch.common.* files (#34888)

This removes the checkstyle `LineLength` suppressions for files in the `org.elasticsearch.common` package and re-wraps the offending lines so they fit within the line-length limit.

Relates to #34884
Lee Hinman 2018-10-26 08:47:39 -06:00 committed by GitHub
parent a39a67cd38
commit af28d1f648
34 changed files with 233 additions and 161 deletions

buildSrc/src/main/resources/checkstyle_suppressions.xml

@@ -164,29 +164,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocationCommands.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]MoveAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]decider[/\\]AllocationDeciders.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]Numbers.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]blobstore[/\\]fs[/\\]FsBlobStore.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]bytes[/\\]BytesArray.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]bytes[/\\]PagedBytesReference.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cache[/\\]Cache.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]collect[/\\]ImmutableOpenIntMap.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]DefaultConstructionProxyFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]internal[/\\]ConstructionContext.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]multibindings[/\\]MapBinder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]spi[/\\]InjectionPoint.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]io[/\\]Channels.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]joda[/\\]Joda.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]XMoreLikeThis.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]Cidrs.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]NetworkService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]recycler[/\\]Recyclers.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArrays.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]CancellableThreads.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]CollectionUtils.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]EsExecutors.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadBarrier.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadContext.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]XContentHelper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]Discovery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]DiscoverySettings.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscovery.java" checks="LineLength" />
@@ -407,17 +384,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]settings[/\\]ClusterSettingsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]shards[/\\]ClusterSearchShardsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]structure[/\\]RoutingIteratorTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]breaker[/\\]MemoryCircuitBreakerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]geo[/\\]ShapeBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]hash[/\\]MessageDigestsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]CidrsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]DistanceUnitTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]FuzzinessTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]LongObjectHashMapTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]EsExecutorsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]PrioritizedExecutorsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]builder[/\\]XContentBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]support[/\\]filtering[/\\]FilterPathGeneratorFilteringTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]BlockingClusterStatePublishResponseHandlerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscoveryUnitTests.java" checks="LineLength" />

server/src/main/java/org/elasticsearch/common/Numbers.java

@@ -61,7 +61,8 @@ public final class Numbers {
}
public static int bytesToInt(BytesRef bytes) {
return (bytes.bytes[bytes.offset] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
return (bytes.bytes[bytes.offset] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) |
((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
}
/**
@@ -77,8 +78,10 @@
}
public static long bytesToLong(BytesRef bytes) {
int high = (bytes.bytes[bytes.offset + 0] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
int low = (bytes.bytes[bytes.offset + 4] << 24) | ((bytes.bytes[bytes.offset + 5] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 6] & 0xff) << 8) | (bytes.bytes[bytes.offset + 7] & 0xff);
int high = (bytes.bytes[bytes.offset + 0] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) |
((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
int low = (bytes.bytes[bytes.offset + 4] << 24) | ((bytes.bytes[bytes.offset + 5] & 0xff) << 16) |
((bytes.bytes[bytes.offset + 6] & 0xff) << 8) | (bytes.bytes[bytes.offset + 7] & 0xff);
return (((long) high) << 32) | (low & 0x0ffffffffL);
}
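
For context, `bytesToInt` above assembles a big-endian `int` from four bytes, masking the lower three bytes with `0xff` so their sign bits don't bleed into the result. A minimal standalone sketch of the same decoding (a demo, not part of this commit's diff):

```java
public class BigEndianDemo {
    public static void main(String[] args) {
        byte[] b = {0x12, 0x34, 0x56, 0x78};
        // Same shift-and-mask pattern as Numbers.bytesToInt
        int value = (b[0] << 24) | ((b[1] & 0xff) << 16)
                | ((b[2] & 0xff) << 8) | (b[3] & 0xff);
        System.out.println(Integer.toHexString(value)); // prints 12345678
    }
}
```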

server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java

@@ -48,7 +48,8 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {
if (!this.readOnly) {
Files.createDirectories(path);
}
this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size",
new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
}
@Override

server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java

@@ -68,7 +68,8 @@ public final class BytesArray extends BytesReference {
@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > this.length) {
throw new IllegalArgumentException("can't slice a buffer with length [" + this.length + "], with slice parameters from [" + from + "], length [" + length + "]");
throw new IllegalArgumentException("can't slice a buffer with length [" + this.length +
"], with slice parameters from [" + from + "], length [" + length + "]");
}
return new BytesArray(bytes, offset + from, length);
}

server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java

@@ -63,7 +63,8 @@ public class PagedBytesReference extends BytesReference {
@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > length()) {
throw new IllegalArgumentException("can't slice a buffer with length [" + length() + "], with slice parameters from [" + from + "], length [" + length + "]");
throw new IllegalArgumentException("can't slice a buffer with length [" + length() +
"], with slice parameters from [" + from + "], length [" + length + "]");
}
return new PagedBytesReference(bigarrays, byteArray, offset + from, length);
}

server/src/main/java/org/elasticsearch/common/cache/Cache.java

@@ -485,7 +485,8 @@ public class Cache<K, V> {
promote(tuple.v1(), now);
}
if (replaced) {
removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED));
removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value,
RemovalNotification.RemovalReason.REPLACED));
}
}

server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java

@@ -39,8 +39,8 @@ import java.util.Map;
/**
* An immutable map implementation based on open hash map.
* <p>
* Can be constructed using a {@link #builder()}, or using {@link #builder(org.elasticsearch.common.collect.ImmutableOpenIntMap)} (which is an optimized
* option to copy over existing content and modify it).
* Can be constructed using a {@link #builder()}, or using {@link #builder(org.elasticsearch.common.collect.ImmutableOpenIntMap)}
* (which is an optimized option to copy over existing content and modify it).
*/
public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCursor<VType>> {
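
A hypothetical usage sketch of the two construction paths the Javadoc above describes; the fluent `fPut` name is an assumption based on the HPPC-style builders used elsewhere in this package:

```java
// Build from scratch, then derive a modified copy (sketch, names as assumed above)
ImmutableOpenIntMap<String> map = ImmutableOpenIntMap.<String>builder()
        .fPut(1, "one")
        .fPut(2, "two")
        .build();
ImmutableOpenIntMap<String> copy = ImmutableOpenIntMap.builder(map)
        .fPut(3, "three")   // copies the existing content, then modifies it
        .build();
```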

server/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java

@@ -50,7 +50,8 @@ class DefaultConstructionProxyFactory<T> implements ConstructionProxyFactory<T>
} catch (InstantiationException e) {
throw new AssertionError(e); // shouldn't happen, we know this is a concrete type
} catch (IllegalAccessException e) {
throw new AssertionError("Wrong access modifiers on " + constructor, e); // a security manager is blocking us, we're hosed
// a security manager is blocking us, we're hosed
throw new AssertionError("Wrong access modifiers on " + constructor, e);
}
}

server/src/main/java/org/elasticsearch/common/inject/internal/ConstructionContext.java

@@ -79,7 +79,8 @@ public class ConstructionContext<T> {
// ES: Replace, since we don't use bytecode gen, just get the type class loader, or system if its null
//ClassLoader classLoader = BytecodeGen.getClassLoader(expectedType);
ClassLoader classLoader = expectedType.getClassLoader() == null ? ClassLoader.getSystemClassLoader() : expectedType.getClassLoader();
ClassLoader classLoader = expectedType.getClassLoader() == null ?
ClassLoader.getSystemClassLoader() : expectedType.getClassLoader();
return expectedType.cast(Proxy.newProxyInstance(classLoader,
new Class[]{expectedType}, invocationHandler));
}

server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java

@@ -275,7 +275,8 @@ public abstract class MapBinder<K, V> {
private final Provider<Set<Entry<K, Provider<V>>>> provider;
@SuppressWarnings("rawtypes") // code is silly stupid with generics
MapBinderProviderWithDependencies(RealMapBinder binder, Set<Dependency<?>> dependencies, Provider<Set<Entry<K, Provider<V>>>> provider) {
MapBinderProviderWithDependencies(RealMapBinder binder, Set<Dependency<?>> dependencies,
Provider<Set<Entry<K, Provider<V>>>> provider) {
this.binder = binder;
this.dependencies = dependencies;
this.provider = provider;
@@ -315,7 +316,8 @@ public abstract class MapBinder<K, V> {
// binds a Map<K, Provider<V>> from a collection of Map<Entry<K, Provider<V>>
final Provider<Set<Entry<K, Provider<V>>>> entrySetProvider = binder
.getProvider(entrySetBinder.getSetKey());
binder.bind(providerMapKey).toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, dependencies, entrySetProvider));
binder.bind(providerMapKey)
.toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, dependencies, entrySetProvider));
final Provider<Map<K, Provider<V>>> mapProvider = binder.getProvider(providerMapKey);
binder.bind(mapKey).toProvider(new ProviderWithDependencies<Map<K, V>>() {

server/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java

@@ -345,7 +345,8 @@ public final class InjectionPoint {
}
private static <M extends Member & AnnotatedElement> void addInjectionPoints(TypeLiteral<?> type,
Factory<M> factory, boolean statics, Collection<InjectionPoint> injectionPoints,
Factory<M> factory, boolean statics,
Collection<InjectionPoint> injectionPoints,
Errors errors) {
if (type.getType() == Object.class) {
return;

server/src/main/java/org/elasticsearch/common/io/Channels.java

@@ -62,7 +62,8 @@ public final class Channels {
* @param destOffset offset in dest to read into
* @param length number of bytes to read
*/
public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition, byte[] dest, int destOffset, int length) throws IOException {
public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition,
byte[] dest, int destOffset, int length) throws IOException {
int read = readFromFileChannel(channel, channelPosition, dest, destOffset, length);
if (read < 0) {
throw new EOFException("read past EOF. pos [" + channelPosition + "] length: [" + length + "] end: [" + channel.size() + "]");
@@ -80,7 +81,8 @@ public final class Channels {
* @return total bytes read or -1 if an attempt was made to read past EOF. The method always tries to read all the bytes
* that will fit in the destination byte buffer.
*/
public static int readFromFileChannel(FileChannel channel, long channelPosition, byte[] dest, int destOffset, int length) throws IOException {
public static int readFromFileChannel(FileChannel channel, long channelPosition, byte[] dest,
int destOffset, int length) throws IOException {
ByteBuffer buffer = ByteBuffer.wrap(dest, destOffset, length);
return readFromFileChannel(channel, channelPosition, buffer);
}
@@ -97,7 +99,8 @@ public final class Channels {
public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition, ByteBuffer dest) throws IOException {
int read = readFromFileChannel(channel, channelPosition, dest);
if (read < 0) {
throw new EOFException("read past EOF. pos [" + channelPosition + "] length: [" + dest.limit() + "] end: [" + channel.size() + "]");
throw new EOFException("read past EOF. pos [" + channelPosition +
"] length: [" + dest.limit() + "] end: [" + channel.size() + "]");
}
}
@@ -135,7 +138,8 @@ public final class Channels {
dest.position(tmpBuffer.position());
}
assert bytesRead == bytesToRead : "failed to read an entire buffer but also didn't get an EOF (read [" + bytesRead + "] needed [" + bytesToRead + "]";
assert bytesRead == bytesToRead : "failed to read an entire buffer but also didn't get an EOF (read [" +
bytesRead + "] needed [" + bytesToRead + "]";
return bytesRead;
}
}
@@ -149,7 +153,8 @@ public final class Channels {
return read;
}
assert read > 0 : "FileChannel.read with non zero-length bb.remaining() must always read at least one byte (FileChannel is in blocking mode, see spec of ReadableByteChannel)";
assert read > 0 : "FileChannel.read with non zero-length bb.remaining() must always read at least one byte " +
"(FileChannel is in blocking mode, see spec of ReadableByteChannel)";
bytesRead += read;
channelPosition += read;
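
Taken together, the overloads above give an absolute-position read into a `byte[]`, returning -1 on EOF (or throwing, in the `WithEofException` variant). A small usage sketch, assuming a readable file at some `Path path`:

```java
byte[] header = new byte[16];
try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
    // Absolute read at position 0; does not move the channel's own position
    int read = Channels.readFromFileChannel(channel, 0L, header, 0, header.length);
    if (read < 0) {
        throw new EOFException("file has fewer than " + header.length + " bytes");
    }
}
```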

server/src/main/java/org/elasticsearch/common/joda/Joda.java

@@ -154,9 +154,11 @@ public class Joda {
} else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
formatter = ISODateTimeFormat.yearMonthDay();
} else if ("epoch_second".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter();
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false),
new EpochTimeParser(false)).toFormatter();
} else if ("epoch_millis".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter();
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true),
new EpochTimeParser(true)).toFormatter();
// strict date formats here, must be at least 4 digits for year and two for months and two for day
} else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) {
formatter = StrictISODateTimeFormat.basicWeekDate();
@@ -245,7 +247,8 @@ public class Joda {
parsers[i] = currentParser.getParser();
}
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(dateTimeFormatter.withZone(DateTimeZone.UTC).getPrinter(), parsers);
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder()
.append(dateTimeFormatter.withZone(DateTimeZone.UTC).getPrinter(), parsers);
formatter = builder.toFormatter();
}
} else {
@@ -286,9 +289,11 @@ public class Joda {
.toFormatter()
.withZoneUTC();
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(),
new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});
return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis",
builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
}
@@ -312,7 +317,8 @@ public class Joda {
@Override
public DateTimeField getField(Chronology chronology) {
return new OffsetDateTimeField(new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
return new OffsetDateTimeField(
new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
}
};
@@ -393,7 +399,8 @@ public class Joda {
* {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
*/
@Override
public void printTo(Writer out, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) throws IOException {
public void printTo(Writer out, long instant, Chronology chrono, int displayOffset,
DateTimeZone displayZone, Locale locale) throws IOException {
if (hasMilliSecondPrecision) {
out.write(String.valueOf(instant - displayOffset));
} else {
@@ -427,7 +434,8 @@ public class Joda {
int minuteOfHour = partial.get(DateTimeFieldType.minuteOfHour());
int secondOfMinute = partial.get(DateTimeFieldType.secondOfMinute());
int millisOfSecond = partial.get(DateTimeFieldType.millisOfSecond());
return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth, hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth,
hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
}
}
}

server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java

@@ -157,7 +157,8 @@ import java.util.Set;
public final class XMoreLikeThis {
// static {
// assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_4_9: "Remove this class once we upgrade to Lucene 5.0";
// assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_4_9:
// "Remove this class once we upgrade to Lucene 5.0";
// }
/**

server/src/main/java/org/elasticsearch/common/network/Cidrs.java

@@ -40,13 +40,15 @@ public final class Cidrs {
String[] fields = cidr.split("/");
if (fields.length != 2) {
throw new IllegalArgumentException(
String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] but was [%s] after splitting on \"/\" in [%s]",
Arrays.toString(fields), cidr)
);
}
// do not try to parse IPv4-mapped IPv6 address
if (fields[0].contains(":")) {
throw new IllegalArgumentException(
String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] where a, b, c, d are decimal octets but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] where a, b, c, d are decimal octets " +
"but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
);
}
byte[] addressBytes;

server/src/main/java/org/elasticsearch/common/network/NetworkService.java

@@ -113,7 +113,8 @@ public final class NetworkService {
}
// check if its a wildcard address: this is only ok if its the only address!
if (address.isAnyLocalAddress() && addresses.length > 1) {
throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense");
throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) +
"} is wildcard, but multiple addresses specified: this makes no sense");
}
}
return addresses;
@@ -156,12 +157,14 @@ public final class NetworkService {
for (InetAddress address : addresses) {
// check if its multicast: flat out mistake
if (address.isMulticastAddress()) {
throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: multicast address");
throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) +
"} is invalid: multicast address");
}
// check if its a wildcard address: this is only ok if its the only address!
// (if it was a single wildcard address, it was replaced by step 1 above)
if (address.isAnyLocalAddress()) {
throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense");
throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) +
"} is wildcard, but multiple addresses specified: this makes no sense");
}
}

server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java

@@ -148,7 +148,8 @@ public enum Recyclers {
}
/**
* Create a concurrent implementation that can support concurrent access from <code>concurrencyLevel</code> threads with little contention.
* Create a concurrent implementation that can support concurrent access from
* <code>concurrencyLevel</code> threads with little contention.
*/
public static <T> Recycler<T> concurrent(final Recycler.Factory<T> factory, final int concurrencyLevel) {
if (concurrencyLevel < 1) {

server/src/main/java/org/elasticsearch/common/util/BigArrays.java

@@ -44,7 +44,8 @@ public class BigArrays implements Releasable {
public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Long.BYTES;
public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF;
/** Returns the next size to grow when working with parallel arrays that may have different page sizes or number of bytes per element. */
/** Returns the next size to grow when working with parallel arrays that
* may have different page sizes or number of bytes per element. */
public static long overSize(long minTargetSize) {
return overSize(minTargetSize, PAGE_SIZE_IN_BYTES / 8, 1);
}
@@ -345,7 +346,8 @@ public class BigArrays implements Releasable {
@Override
public long ramBytesUsed() {
return SHALLOW_SIZE + RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF * size());
return SHALLOW_SIZE + RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER +
RamUsageEstimator.NUM_BYTES_OBJECT_REF * size());
}
@SuppressWarnings("unchecked")
@@ -503,7 +505,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public ByteArray grow(ByteArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -587,7 +590,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public IntArray grow(IntArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -638,7 +642,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public LongArray grow(LongArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -686,7 +691,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public DoubleArray grow(DoubleArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -734,7 +740,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public FloatArray grow(FloatArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -775,7 +782,8 @@ public class BigArrays implements Releasable {
}
}
/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
/** Grow an array to a size that is larger than <code>minSize</code>,
* preserving content, and potentially reusing part of the provided array. */
public <T> ObjectArray<T> grow(ObjectArray<T> array, long minSize) {
if (minSize <= array.size()) {
return array;
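
The repeated `grow` Javadoc above documents a contract worth spelling out: `grow` may hand back a different array instance, so callers must continue with the returned reference. A sketch using the non-recycling singleton (not part of this commit's diff):

```java
LongArray array = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(10);
array.set(0, 42L);
// grow() preserves content but may allocate a new backing array,
// so the returned reference replaces the old one
array = BigArrays.NON_RECYCLING_INSTANCE.grow(array, 1000);
assert array.size() >= 1000 && array.get(0) == 42L;
array.close();
```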

server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java

@@ -45,7 +45,8 @@ public class CancellableThreads {
}
/** call this will throw an exception if operation was cancelled. Override {@link #onCancel(String, Exception)} for custom failure logic */
/** call this will throw an exception if operation was cancelled.
* Override {@link #onCancel(String, Exception)} for custom failure logic */
public synchronized void checkForCancel() {
if (isCancelled()) {
onCancel(reason, null);

server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java

@@ -307,7 +307,8 @@ public class CollectionUtils {
sort(new BytesRefBuilder(), new BytesRefBuilder(), bytes, indices);
}
private static void sort(final BytesRefBuilder scratch, final BytesRefBuilder scratch1, final BytesRefArray bytes, final int[] indices) {
private static void sort(final BytesRefBuilder scratch, final BytesRefBuilder scratch1,
final BytesRefArray bytes, final int[] indices) {
final int numValues = bytes.size();
assert indices.length >= numValues;

server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java

@@ -59,25 +59,30 @@ public class EsExecutors {
return PROCESSORS_SETTING.get(settings);
}
public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory, ThreadContext contextHolder, ScheduledExecutorService timer) {
public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory,
ThreadContext contextHolder, ScheduledExecutorService timer) {
return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory, contextHolder, timer);
}
public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) {
public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit,
ThreadFactory threadFactory, ThreadContext contextHolder) {
ExecutorScalingQueue<Runnable> queue = new ExecutorScalingQueue<>();
EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
EsThreadPoolExecutor executor =
new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
queue.executor = executor;
return executor;
}
public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory, ThreadContext contextHolder) {
public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity,
ThreadFactory threadFactory, ThreadContext contextHolder) {
BlockingQueue<Runnable> queue;
if (queueCapacity < 0) {
queue = ConcurrentCollections.newBlockingQueue();
} else {
queue = new SizeBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(), queueCapacity);
}
return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy(), contextHolder);
return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS,
queue, threadFactory, new EsAbortPolicy(), contextHolder);
}
/**

server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java

@@ -56,14 +56,15 @@ import java.nio.charset.StandardCharsets;
/**
* A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with
* a thread. It allows to store and retrieve header information across method calls, network calls as well as threads spawned from a
* thread that has a {@link ThreadContext} associated with. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool} have out of the box
* support for {@link ThreadContext} and all threads spawned will inherit the {@link ThreadContext} from the thread that it is forking from.".
* Network calls will also preserve the senders headers automatically.
* thread that has a {@link ThreadContext} associated with. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool}
* have out of the box support for {@link ThreadContext} and all threads spawned will inherit the {@link ThreadContext} from the thread
* that it is forking from.". Network calls will also preserve the senders headers automatically.
* <p>
* Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by a thread pool or executor
* being responsible for stashing and restoring the threads context. For instance if a network request is received, all headers are deserialized from the network
* and directly added as the headers of the threads {@link ThreadContext} (see {@link #readHeaders(StreamInput)}. In order to not modify the context that is currently
* active on this thread the network code uses a try/with pattern to stash it's current context, read headers into a fresh one and once the request is handled or a handler thread
* Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by
* a thread pool or executor being responsible for stashing and restoring the threads context. For instance if a network request is
* received, all headers are deserialized from the network and directly added as the headers of the threads {@link ThreadContext}
* (see {@link #readHeaders(StreamInput)}. In order to not modify the context that is currently active on this thread the network code
* uses a try/with pattern to stash it's current context, read headers into a fresh one and once the request is handled or a handler thread
* is forked (which in turn inherits the context) it restores the previous context. For instance:
* </p>
* <pre>
@@ -127,8 +128,9 @@ public final class ThreadContext implements Closeable, Writeable {
}
/**
* Removes the current context and resets a new context that contains a merge of the current headers and the given headers. The removed context can be
* restored when closing the returned {@link StoredContext}. The merge strategy is that headers that are already existing are preserved unless they are defaults.
* Removes the current context and resets a new context that contains a merge of the current headers and the given headers.
* The removed context can be restored when closing the returned {@link StoredContext}. The merge strategy is that headers
* that are already existing are preserved unless they are defaults.
*/
public StoredContext stashAndMergeHeaders(Map<String, String> headers) {
final ThreadContextStruct context = threadLocal.get();
@@ -481,7 +483,8 @@ public final class ThreadContext implements Closeable, Writeable {
logger.warn("Dropping a warning header, as their total size reached the maximum allowed of ["
+ maxWarningHeaderSize + "] bytes set in ["
+ HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!");
return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize);
return new ThreadContextStruct(requestHeaders, responseHeaders,
transientHeaders, isSystemContext, newWarningHeaderSize);
}
}
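
A sketch of the stash-and-restore pattern the class Javadoc above describes, assuming the usual `stashContext()`/`putHeader()` accessors; closing the `StoredContext` restores the previous context:

```java
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
threadContext.putHeader("outer", "value");
try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
    // Fresh context: non-default headers from the outer scope are hidden
    assert threadContext.getHeader("outer") == null;
    threadContext.putHeader("request-scoped", "value");
}
// Previous context restored on close
assert "value".equals(threadContext.getHeader("outer"));
```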

server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java

@@ -42,7 +42,8 @@ public class XContentHelper {
/**
* Creates a parser based on the bytes provided
* @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, BytesReference, XContentType)} to avoid content type auto-detection
* @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, BytesReference, XContentType)}
* to avoid content type auto-detection
*/
@Deprecated
public static XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler,
@@ -109,7 +110,8 @@ public class XContentHelper {
}
contentType = xContentType != null ? xContentType : XContentFactory.xContentType(input);
try (InputStream stream = input) {
return new Tuple<>(Objects.requireNonNull(contentType), convertToMap(XContentFactory.xContent(contentType), stream, ordered));
return new Tuple<>(Objects.requireNonNull(contentType),
convertToMap(XContentFactory.xContent(contentType), stream, ordered));
}
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
@@ -294,7 +296,8 @@ public class XContentHelper {
* auto-detection
*/
@Deprecated
public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException {
public static void writeRawField(String field, BytesReference source, XContentBuilder builder,
ToXContent.Params params) throws IOException {
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
try (InputStream compressedStreamInput = compressor.streamInput(source.streamInput())) {
@@ -340,7 +343,8 @@ public class XContentHelper {
* {@link XContentType}. Wraps the output into a new anonymous object according to the value returned
* by the {@link ToXContent#isFragment()} method returns.
*/
public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params, boolean humanReadable) throws IOException {
public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params,
boolean humanReadable) throws IOException {
try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
builder.humanReadable(humanReadable);
if (toXContent.isFragment()) {

server/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java

@@ -94,7 +94,8 @@ public class ShapeBuilderTests extends ESTestCase {
public void testNewPolygon_coordinates() {
Polygon polygon = new PolygonBuilder(new CoordinatesBuilder()
.coordinates(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30))
.coordinates(new Coordinate(-45, 30), new Coordinate(45, 30),
new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30))
).toPolygon();
LineString exterior = polygon.getExteriorRing();

server/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java

@@ -34,8 +34,10 @@ public class MessageDigestsTests extends ESTestCase {
public void testMd5() throws Exception {
assertHash("d41d8cd98f00b204e9800998ecf8427e", "", MessageDigests.md5());
assertHash("900150983cd24fb0d6963f7d28e17f72", "abc", MessageDigests.md5());
assertHash("8215ef0796a20bcaaae116d3876c664a", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5());
assertHash("7707d6ae4e027c70eea2a935c2296f21", new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5());
assertHash("8215ef0796a20bcaaae116d3876c664a",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5());
assertHash("7707d6ae4e027c70eea2a935c2296f21",
new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5());
assertHash("9e107d9d372bb6826bd81d3542a419d6", "The quick brown fox jumps over the lazy dog", MessageDigests.md5());
assertHash("1055d3e698d289f2af8663725127bd4b", "The quick brown fox jumps over the lazy cog", MessageDigests.md5());
}
@@ -43,8 +45,10 @@ public class MessageDigestsTests extends ESTestCase {
public void testSha1() throws Exception {
assertHash("da39a3ee5e6b4b0d3255bfef95601890afd80709", "", MessageDigests.sha1());
assertHash("a9993e364706816aba3e25717850c26c9cd0d89d", "abc", MessageDigests.sha1());
assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1());
assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1());
assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1());
assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f",
new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1());
assertHash("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", "The quick brown fox jumps over the lazy dog", MessageDigests.sha1());
assertHash("de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", "The quick brown fox jumps over the lazy cog", MessageDigests.sha1());
}
@@ -52,10 +56,14 @@ public class MessageDigestsTests extends ESTestCase {
public void testSha256() throws Exception {
assertHash("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "", MessageDigests.sha256());
assertHash("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc", MessageDigests.sha256());
assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256());
assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256());
assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", "The quick brown fox jumps over the lazy dog", MessageDigests.sha256());
assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be", "The quick brown fox jumps over the lazy cog", MessageDigests.sha256());
assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1",
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256());
assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0",
new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256());
assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592",
"The quick brown fox jumps over the lazy dog", MessageDigests.sha256());
assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be",
"The quick brown fox jumps over the lazy cog", MessageDigests.sha256());
}
public void testToHexString() throws Exception {

server/src/test/java/org/elasticsearch/common/network/CidrsTests.java

@@ -78,7 +78,8 @@ public class CidrsTests extends ESTestCase {
public void testValidSpecificCases() {
List<Tuple<String, long[]>> cases = new ArrayList<>();
cases.add(new Tuple<>("192.168.0.0/24", new long[]{(192L << 24) + (168 << 16), (192L << 24) + (168 << 16) + (1 << 8)}));
cases.add(new Tuple<>("192.168.128.0/17", new long[]{(192L << 24) + (168 << 16) + (128 << 8), (192L << 24) + (168 << 16) + (128 << 8) + (1 << 15)}));
cases.add(new Tuple<>("192.168.128.0/17",
new long[]{(192L << 24) + (168 << 16) + (128 << 8), (192L << 24) + (168 << 16) + (128 << 8) + (1 << 15)}));
cases.add(new Tuple<>("128.0.0.0/1", new long[]{128L << 24, (128L << 24) + (1L << 31)})); // edge case
cases.add(new Tuple<>("0.0.0.0/0", new long[]{0, 1L << 32})); // edge case
cases.add(new Tuple<>("0.0.0.0/1", new long[]{0, 1L << 31})); // edge case

server/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java

@@ -53,8 +53,10 @@ public class DistanceUnitTests extends ESTestCase {
double testValue = 12345.678;
for (DistanceUnit unit : DistanceUnit.values()) {
assertThat("Unit can be parsed from '" + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
assertThat("Unit can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
assertThat("Value can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue));
assertThat("Unit can be parsed from '" + testValue + unit.toString() + "'",
DistanceUnit.fromString(unit.toString()), equalTo(unit));
assertThat("Value can be parsed from '" + testValue + unit.toString() + "'",
DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue));
}
}

server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java

@@ -67,7 +67,8 @@ public class FuzzinessTests extends ESTestCase {
try (XContentParser parser = createParser(json)) {
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)));
assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER),
equalTo(XContentParser.Token.VALUE_STRING)));
Fuzziness fuzziness = Fuzziness.parse(parser);
if (value.intValue() >= 1) {
assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue())));

server/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java

@@ -32,7 +32,8 @@ public class LongObjectHashMapTests extends ESSingleNodeTestCase {
public void testDuel() {
final LongObjectHashMap<Object> map1 = new LongObjectHashMap<>();
final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, randombigArrays());
final LongObjectPagedHashMap<Object> map2 =
new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, randombigArrays());
final int maxKey = randomIntBetween(1, 10000);
final int iters = scaledRandomIntBetween(10000, 100000);
for (int i = 0; i < iters; ++i) {

server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java

@@ -172,7 +172,8 @@ public class EsExecutorsTests extends ESTestCase {
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
ThreadPoolExecutor pool =
EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext);
EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), randomTimeUnit(),
EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
@@ -209,7 +210,8 @@ public class EsExecutorsTests extends ESTestCase {
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
final ThreadPoolExecutor pool =
EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext);
EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS,
EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));

server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java

@@ -159,7 +159,8 @@ public class PrioritizedExecutorsTests extends ESTestCase {
}
public void testSubmitPrioritizedExecutorWithMixed() throws Exception {
ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null);
ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(),
EsExecutors.daemonThreadFactory(getTestName()), holder, null);
List<Integer> results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);

server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java

@@ -123,7 +123,8 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
xContentBuilder.field("test1", "value1");
xContentBuilder.endObject();
assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
assertThat(Strings.toString(xContentBuilder),
equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
}
{
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@@ -133,7 +134,8 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput());
xContentBuilder.field("test1", "value1");
xContentBuilder.endObject();
assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
assertThat(Strings.toString(xContentBuilder),
equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
}
}

server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java

@@ -34,7 +34,8 @@ public class FilterPathGeneratorFilteringTests extends ESTestCase {
private final JsonFactory JSON_FACTORY = new JsonFactory();
public void testInclusiveFilters() throws Exception {
final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
assertResult(SAMPLE, "a", true, "{'a':0}");
assertResult(SAMPLE, "b", true, "{'b':true}");
@@ -79,48 +80,80 @@
}
public void testExclusiveFilters() throws Exception {
final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
assertResult(SAMPLE, "a", false, "{'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "b", false, "{'a':0,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "c", false, "{'a':0,'b':true,'d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "d", false, "{'a':0,'b':true,'c':'c_value','e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "a", false, "{'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "b", false, "{'a':0,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "c", false, "{'a':0,'b':true,'d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "d", false, "{'a':0,'b':true,'c':'c_value','e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "h", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "z", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "h", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "z", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f1", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f1", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'}," +
"{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.f*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'g1':'g1_value','g2':'g2_value'}]," +
"'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}]," +
"'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "h.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "*.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.*.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.i.j.k.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "**.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "h.*.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "**.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
"{'g1':'g1_value','g2':'g2_value'}]}");
assertResult(SAMPLE, "**.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "**.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'}," +
"{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
}