Remove and forbid use of guava Function, Charsets, Collections2
This commit removes all uses of the Guava classes Function, Charsets, and Collections2 across the codebase and forbids their future use. This is one of many steps in the eventual removal of Guava as a dependency. Relates #13224
This commit is contained in:
parent d0deb28336
commit 40959068d5
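
The change is almost entirely mechanical, applying the same three substitutions throughout the diff below. As a minimal sketch of the patterns (illustrative class and variable names, not code from this commit):

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

public class GuavaMigrationSketch {

    // 1. com.google.common.base.Charsets.UTF_8 -> java.nio.charset.StandardCharsets.UTF_8
    static byte[] toUtf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }

    // 2. An anonymous com.google.common.base.Function -> a lambda implementing
    //    java.util.function.Function.
    static Function<String, Integer> length = s -> s.length();

    // 3. Collections2.transform(collection, function) -> Stream.map. Note the Guava
    //    view was lazy, while collect(...) materializes the result eagerly.
    static List<Integer> lengths(List<String> words) {
        return words.stream().map(String::length).collect(Collectors.toList());
    }
}
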
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.indices.create;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
@@ -338,7 +338,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
      * Sets the settings and mappings as a single source.
      */
     public CreateIndexRequest source(String source) {
-        return source(source.getBytes(Charsets.UTF_8));
+        return source(source.getBytes(StandardCharsets.UTF_8));
     }
 
     /**
@@ -374,7 +374,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
                 throw new ElasticsearchParseException("failed to parse source for create index", e);
             }
         } else {
-            settings(new String(source.toBytes(), Charsets.UTF_8));
+            settings(new String(source.toBytes(), StandardCharsets.UTF_8));
         }
         return this;
     }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.index;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
@@ -377,7 +377,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
      * or using the {@link #source(byte[])}.
      */
    public IndexRequest source(String source) {
-        this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
+        this.source = new BytesArray(source.getBytes(StandardCharsets.UTF_8));
         return this;
     }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.indexedscripts.put;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
@@ -205,7 +205,7 @@ public class PutIndexedScriptRequest extends ActionRequest<PutIndexedScriptReque
      * or using the {@link #source(byte[])}.
      */
     public PutIndexedScriptRequest source(String source) {
-        this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
+        this.source = new BytesArray(source.getBytes(StandardCharsets.UTF_8));
         return this;
     }
 
@@ -21,7 +21,7 @@ package org.elasticsearch.cluster.metadata;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
@@ -489,7 +489,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                     }
                     int lastDotIndex = fileName.lastIndexOf('.');
                     String mappingType = lastDotIndex != -1 ? mappingFile.getFileName().toString().substring(0, lastDotIndex) : mappingFile.getFileName().toString();
-                    try (BufferedReader reader = Files.newBufferedReader(mappingFile, Charsets.UTF_8)) {
+                    try (BufferedReader reader = Files.newBufferedReader(mappingFile, StandardCharsets.UTF_8)) {
                         String mappingSource = Streams.copyToString(reader);
                         if (mappings.containsKey(mappingType)) {
                             XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));
 
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 
@@ -74,7 +74,7 @@ public final class PidFile {
         }
 
         try(OutputStream stream = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
-            stream.write(Long.toString(pid).getBytes(Charsets.UTF_8));
+            stream.write(Long.toString(pid).getBytes(StandardCharsets.UTF_8));
         }
 
         if (deleteOnExit) {
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.bytes;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -151,7 +151,7 @@ public class BytesArray implements BytesReference {
         if (length == 0) {
             return "";
         }
-        return new String(bytes, offset, length, Charsets.UTF_8);
+        return new String(bytes, offset, length, StandardCharsets.UTF_8);
     }
 
     @Override
 
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.bytes;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -111,7 +111,7 @@ public class ChannelBufferBytesReference implements BytesReference {
 
     @Override
     public String toUtf8() {
-        return buffer.toString(Charsets.UTF_8);
+        return buffer.toString(StandardCharsets.UTF_8);
     }
 
     @Override
 
@@ -20,6 +20,8 @@
 package org.elasticsearch.common.collect;
 
 import com.google.common.base.Preconditions;
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
 import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.mutable.MutableValueInt;
 
@@ -34,6 +36,9 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * An immutable map whose writes result in a new copy of the map to be created.
@@ -514,14 +519,18 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         return copyAndPutAll(other.entrySet());
     }
 
-    <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Collection<Map.Entry<K1, V1>> entries) {
+    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Iterable<Entry<K1, V1>> entries) {
         CopyOnWriteHashMap<K, V> result = this;
-        for (Map.Entry<K1, V1> entry : entries) {
+        for (Entry<K1, V1> entry : entries) {
             result = result.copyAndPut(entry.getKey(), entry.getValue());
         }
         return result;
     }
 
+    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Stream<Entry<K1, V1>> entries) {
+        return copyAndPutAll(entries::iterator);
+    }
+
     /**
      * Remove the given key from this map. The current hash table is not modified.
     */
 
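The new Stream overload above delegates to the Iterable overload via entries::iterator: Stream.iterator() has exactly the shape Iterable requires, so a method reference yields a one-shot Iterable. A small self-contained sketch of the idiom (names are illustrative):

import java.util.stream.Stream;

public class StreamAsIterableSketch {

    // A Stream is single-use, but its iterator() method matches Iterable.iterator(),
    // so `stream::iterator` satisfies Iterable<T> for exactly one traversal.
    static <T> Iterable<T> once(Stream<T> stream) {
        return stream::iterator;
    }

    public static void main(String[] args) {
        for (String name : once(Stream.of("a", "b", "c"))) {
            System.out.println(name);  // prints a, b, c; a second loop would fail
        }
    }
}
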
@@ -19,14 +19,10 @@
 
 package org.elasticsearch.common.collect;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.ForwardingSet;
 
 import java.util.AbstractMap;
 import java.util.Collection;
-import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
 /**
@@ -78,13 +74,9 @@ public class CopyOnWriteHashSet<T> extends ForwardingSet<T> {
      * case of equality.
      */
     public CopyOnWriteHashSet<T> copyAndAddAll(Collection<? extends T> entries) {
-        final Collection<Entry<T, Boolean>> asMapEntries = Collections2.transform(entries,new Function<T, Map.Entry<T, Boolean>>() {
-            @Override
-            public Entry<T, Boolean> apply(T input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input, true);
-            }
-        });
-        CopyOnWriteHashMap<T, Boolean> updated = this.map.copyAndPutAll(asMapEntries);
+        CopyOnWriteHashMap<T, Boolean> updated = this.map.copyAndPutAll(entries.stream().map(
+            p -> new AbstractMap.SimpleImmutableEntry<>(p, true)
+        ));
         return new CopyOnWriteHashSet<>(updated);
     }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.http.client;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.hash.Hashing;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.*;
@@ -133,7 +133,7 @@ public class HttpDownloadHelper {
         try {
             if (download(checksumURL, checksumFile, progress, timeout)) {
                 byte[] fileBytes = Files.readAllBytes(originalFile);
-                List<String> checksumLines = Files.readAllLines(checksumFile, Charsets.UTF_8);
+                List<String> checksumLines = Files.readAllLines(checksumFile, StandardCharsets.UTF_8);
                 if (checksumLines.size() != 1) {
                     throw new ElasticsearchCorruptionException("invalid format for checksum file (" +
                             hashFunc.name() + "), expected 1 line, got: " + checksumLines.size());
@@ -345,7 +345,7 @@ public class HttpDownloadHelper {
                 if (!isSecureProcotol) {
                     throw new IOException("Basic auth is only supported for HTTPS!");
                 }
-                String basicAuth = Base64.encodeBytes(aSource.getUserInfo().getBytes(Charsets.UTF_8));
+                String basicAuth = Base64.encodeBytes(aSource.getUserInfo().getBytes(StandardCharsets.UTF_8));
                 connection.setRequestProperty("Authorization", "Basic " + basicAuth);
             }
 
@@ -19,6 +19,7 @@ package org.elasticsearch.common.inject.internal;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import org.elasticsearch.common.SuppressForbidden;
 
 import java.util.concurrent.ExecutionException;
 
@@ -28,6 +29,8 @@ import java.util.concurrent.ExecutionException;
  *
  * @author jessewilson@google.com (Jesse Wilson)
  */
+// TODO remove this suppression once we get rid of the CacheBuilder and friends
+@SuppressForbidden(reason = "this uses Function in it's method declaration somewhere")
 public abstract class FailableCache<K, V> {
 
     private final LoadingCache<K, Object> delegate = CacheBuilder.newBuilder().build(new CacheLoader<K, Object>() {
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.io;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.util.Callback;
 
 import java.io.BufferedReader;
@@ -234,7 +234,7 @@ public abstract class Streams {
     }
 
     public static void readAllLines(InputStream input, Callback<String> callback) throws IOException {
-        try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, Charsets.UTF_8))) {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) {
             String line;
             while ((line = reader.readLine()) != null) {
                 callback.handle(line);
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.settings;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Booleans;
@@ -1066,7 +1066,7 @@ public final class Settings implements ToXContent {
         public Builder loadFromStream(String resourceName, InputStream is) throws SettingsException {
             SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromResource(resourceName);
             try {
-                Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8)));
+                Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8)));
                 put(loadedSettings);
             } catch (Exception e) {
                 throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);
 
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesReference;
 
 /**
@@ -54,7 +54,7 @@ public class BytesText implements Text {
         if (!bytes.hasArray()) {
             bytes = bytes.toBytesArray();
         }
-        return new String(bytes.array(), bytes.arrayOffset(), bytes.length(), Charsets.UTF_8);
+        return new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8);
     }
 
     @Override
 
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 
@@ -61,7 +61,7 @@ public class StringAndBytesText implements Text {
     @Override
     public BytesReference bytes() {
         if (bytes == null) {
-            bytes = new BytesArray(text.getBytes(Charsets.UTF_8));
+            bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8));
         }
         return bytes;
     }
@@ -78,7 +78,7 @@ public class StringAndBytesText implements Text {
             if (!bytes.hasArray()) {
                 bytes = bytes.toBytesArray();
             }
-            text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), Charsets.UTF_8);
+            text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8);
         }
         return text;
     }
 
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 
@@ -54,7 +54,7 @@ public class StringText implements Text {
 
     @Override
     public BytesReference bytes() {
-        return new BytesArray(text.getBytes(Charsets.UTF_8));
+        return new BytesArray(text.getBytes(StandardCharsets.UTF_8));
     }
 
     @Override
 
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.DoubleArrayList;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.ObjectArrayList;
-import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.BytesRef;
@@ -394,20 +393,6 @@ public enum CollectionUtils {
         }
     }
 
-    public static <E, T> List<T> eagerTransform(List<E> list, Function<E, T> transform) {
-        if (list == null) {
-            throw new NullPointerException("list");
-        }
-        if (transform == null) {
-            throw new NullPointerException("transform");
-        }
-        List<T> result = new ArrayList<>(list.size());
-        for (E element : list) {
-            result.add(transform.apply(element));
-        }
-        return result;
-    }
-
     public static <E> ArrayList<E> arrayAsArrayList(E... elements) {
         if (elements == null) {
             throw new NullPointerException("elements");
 
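With eagerTransform gone, callers get the same eager mapping from the streams API. A hedged sketch of the drop-in equivalent (not code from this commit; the deleted helper likewise rejected null arguments):

import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

public class EagerTransformSketch {

    // Eagerly maps every element of `list` through `transform`, like the removed helper.
    static <E, T> List<T> eagerTransform(List<E> list, Function<E, T> transform) {
        Objects.requireNonNull(list, "list");
        Objects.requireNonNull(transform, "transform");
        return list.stream().map(transform).collect(Collectors.toList());
    }
}
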
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.util;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.primitives.Ints;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexWriter;
@@ -129,14 +129,14 @@ public class MultiDataPathUpgrader {
      */
     public void checkIndex(ShardPath targetPath) throws IOException {
         BytesStreamOutput os = new BytesStreamOutput();
-        PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
         try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex());
             final CheckIndex checkIndex = new CheckIndex(directory)) {
             checkIndex.setInfoStream(out);
             CheckIndex.Status status = checkIndex.checkIndex();
             out.flush();
             if (!status.clean) {
-                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw new IllegalStateException("index check failure");
             }
         }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.xcontent;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Strings;
@@ -1226,7 +1226,7 @@ public final class XContentBuilder implements BytesStream, Releasable {
     public String string() throws IOException {
         close();
         BytesArray bytesArray = bytes().toBytesArray();
-        return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
+        return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
     }
 
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.xcontent;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -100,7 +100,7 @@ public class XContentHelper {
         XContentType xContentType = XContentFactory.xContentType(bytes);
         if (xContentType == XContentType.JSON && !reformatJson) {
             BytesArray bytesArray = bytes.toBytesArray();
-            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
+            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
         }
         XContentParser parser = null;
         try {
@@ -126,7 +126,7 @@ public class XContentHelper {
     public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson, boolean prettyPrint) throws IOException {
         XContentType xContentType = XContentFactory.xContentType(data, offset, length);
         if (xContentType == XContentType.JSON && !reformatJson) {
-            return new String(data, offset, length, Charsets.UTF_8);
+            return new String(data, offset, length, StandardCharsets.UTF_8);
         }
         XContentParser parser = null;
         try {
 
@@ -19,7 +19,6 @@
 package org.elasticsearch.gateway;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Collections2;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CorruptIndexException;
 
@@ -19,9 +19,7 @@
 
 package org.elasticsearch.index;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
@@ -177,12 +175,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
 
     @Override
     public Iterator<IndexShard> iterator() {
-        return Iterators.transform(shards.values().iterator(), new Function<IndexShardInjectorPair, IndexShard>() {
-            @Override
-            public IndexShard apply(IndexShardInjectorPair input) {
-                return input.getIndexShard();
-            }
-        });
+        return shards.values().stream().map((p) -> p.getIndexShard()).iterator();
     }
 
     public boolean hasShard(int shardId) {
 
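The rewrite above replaces Guava's Iterators.transform with a stream-backed iterator; both are lazy views that apply the mapping only as elements are consumed. An illustrative, self-contained sketch with stand-in types (Pair here is hypothetical, mirroring the pair objects held in the map):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class TransformingIteratorSketch {

    // Stand-in for a holder type such as the IndexShardInjectorPair in the diff.
    static class Pair {
        private final String shard;
        Pair(String shard) { this.shard = shard; }
        String getShard() { return shard; }
    }

    public static void main(String[] args) {
        Map<Integer, Pair> shards = new LinkedHashMap<>();
        shards.put(0, new Pair("shard-0"));
        shards.put(1, new Pair("shard-1"));

        // The mapping runs lazily during iteration; no intermediate list is built.
        Iterator<String> it = shards.values().stream().map(Pair::getShard).iterator();
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}
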
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.analysis;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.NumericTokenStream;
@@ -235,7 +235,7 @@ public class Analysis {
 
         final Path wordListFile = env.configFile().resolve(wordListPath);
 
-        try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), Charsets.UTF_8)) {
+        try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) {
             return loadWordList(reader, "#");
         } catch (IOException ioe) {
             String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
@@ -283,7 +283,7 @@ public class Analysis {
         final Path path = env.configFile().resolve(filePath);
 
         try {
-            return FileSystemUtils.newBufferedReader(path.toUri().toURL(), Charsets.UTF_8);
+            return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8);
         } catch (IOException ioe) {
             String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
             throw new IllegalArgumentException(message);
 
@@ -23,8 +23,10 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 
+import java.util.AbstractMap;
 import java.util.Collection;
 import java.util.Map;
+import java.util.stream.Stream;
 
 /**
  *
@@ -35,7 +37,7 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
     private final Analyzer defaultAnalyzer;
 
     public FieldNameAnalyzer(Analyzer defaultAnalyzer) {
-        this(new CopyOnWriteHashMap<String, Analyzer>(), defaultAnalyzer);
+        this(new CopyOnWriteHashMap<>(), defaultAnalyzer);
     }
 
     public FieldNameAnalyzer(Map<String, Analyzer> analyzers, Analyzer defaultAnalyzer) {
@@ -66,16 +68,14 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
     /**
      * Return a new instance that contains the union of this and of the provided analyzers.
     */
-    public FieldNameAnalyzer copyAndAddAll(Collection<? extends Map.Entry<String, Analyzer>> mappers) {
-        CopyOnWriteHashMap<String, Analyzer> analyzers = this.analyzers;
-        for (Map.Entry<String, Analyzer> entry : mappers) {
-            Analyzer analyzer = entry.getValue();
-            if (analyzer == null) {
-                analyzer = defaultAnalyzer;
+    public FieldNameAnalyzer copyAndAddAll(Stream<? extends Map.Entry<String, Analyzer>> mappers) {
+        CopyOnWriteHashMap<String, Analyzer> result = analyzers.copyAndPutAll(mappers.map((e) -> {
+            if (e.getValue() == null) {
+                return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer);
             }
-            analyzers = analyzers.copyAndPut(entry.getKey(), analyzer);
-        }
-        return new FieldNameAnalyzer(analyzers, defaultAnalyzer);
+            return e;
+        }));
+        return new FieldNameAnalyzer(result, defaultAnalyzer);
     }
 
 }
 
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.index.mapper;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.regex.Regex;
@@ -65,28 +63,19 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
         for (FieldMapper fieldMapper : newMappers) {
             map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
         }
-        FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer());
-            }
-        }));
-        FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer());
-            }
-        }));
-        FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer());
-            }
-        }));
-        return new DocumentFieldMappers(map, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
+        FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer())
+        ));
+        FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer())
+        ));
+        FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer())
+        ));
+        return new DocumentFieldMappers(map,indexAnalyzer,searchAnalyzer,searchQuoteAnalyzer);
     }
 
     /** Returns the mapper for the given field */
     public FieldMapper getMapper(String field) {
         return fieldMappers.get(field);
     }
 
@@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
@@ -47,6 +45,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
+import java.util.stream.StreamSupport;
 
 public abstract class FieldMapper extends Mapper {
 
@@ -658,12 +657,7 @@ public abstract class FieldMapper extends Mapper {
     }
 
     public Iterator<Mapper> iterator() {
-        return Iterators.transform(mappers.values().iterator(), new Function<ObjectCursor<FieldMapper>, Mapper>() {
-            @Override
-            public Mapper apply(@Nullable ObjectCursor<FieldMapper> cursor) {
-                return cursor.value;
-            }
-        });
+        return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
     }
 
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.index.mapper;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.regex.Regex;
 
@@ -35,12 +33,6 @@ import java.util.Set;
  * An immutable container for looking up {@link MappedFieldType}s by their name.
 */
 class FieldTypeLookup implements Iterable<MappedFieldType> {
-    private static final Function<MappedFieldTypeReference, MappedFieldType> UNWRAPPER = new Function<MappedFieldTypeReference, MappedFieldType>() {
-        @Override
-        public MappedFieldType apply(MappedFieldTypeReference ref) {
-            return ref.get();
-        }
-    };
 
     /** Full field name to field type */
     private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
@@ -179,6 +171,6 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
 
     public Iterator<MappedFieldType> iterator() {
-        return Iterators.transform(fullNameToFieldType.values().iterator(), UNWRAPPER);
+        return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator();
     }
 }
 
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.ObjectHashSet;
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterators;
@@ -70,6 +69,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Function;
 
 import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 
@@ -84,22 +84,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             "_size", "_timestamp", "_ttl"
     );
 
-    private static final Function<MappedFieldType, Analyzer> INDEX_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.indexAnalyzer();
-        }
-    };
-    private static final Function<MappedFieldType, Analyzer> SEARCH_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.searchAnalyzer();
-        }
-    };
-    private static final Function<MappedFieldType, Analyzer> SEARCH_QUOTE_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.searchQuoteAnalyzer();
-        }
-    };
-
     private final AnalysisService analysisService;
 
     /**
@@ -142,9 +126,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         this.analysisService = analysisService;
         this.fieldTypes = new FieldTypeLookup();
         this.documentParser = new DocumentMapperParser(indexSettings, this, analysisService, similarityLookupService, scriptService);
-        this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), INDEX_ANALYZER_EXTRACTOR);
-        this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), SEARCH_ANALYZER_EXTRACTOR);
-        this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), SEARCH_QUOTE_ANALYZER_EXTRACTOR);
+        this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), p -> p.indexAnalyzer());
+        this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), p -> p.searchAnalyzer());
+        this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());
 
         this.dynamic = indexSettings.getAsBoolean("index.mapper.dynamic", true);
         defaultPercolatorMappingSource = "{\n" +
@@ -194,17 +178,14 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
      * As is this not really an active type, you would typically set this to false
     */
     public Iterable<DocumentMapper> docMappers(final boolean includingDefaultMapping) {
-        return new Iterable<DocumentMapper>() {
-            @Override
-            public Iterator<DocumentMapper> iterator() {
-                final Iterator<DocumentMapper> iterator;
-                if (includingDefaultMapping) {
-                    iterator = mappers.values().iterator();
-                } else {
-                    iterator = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).iterator();
-                }
-                return Iterators.unmodifiableIterator(iterator);
+        return () -> {
+            final Iterator<DocumentMapper> iterator;
+            if (includingDefaultMapping) {
+                iterator = mappers.values().iterator();
+            } else {
+                iterator = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).iterator();
             }
+            return Iterators.unmodifiableIterator(iterator);
         };
     }
 
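docMappers can return a bare lambda because Iterable declares a single abstract method, iterator(); each for-each loop over the result invokes the lambda again, producing a fresh iterator. A sketch of the idiom with illustrative data (the "_default_" literal mirrors the DEFAULT_MAPPING constant):

import java.util.Arrays;
import java.util.List;

public class IterableLambdaSketch {

    // Iterable<T> is a functional interface in practice: the lambda body is iterator().
    static Iterable<String> nonDefault(List<String> types) {
        return () -> types.stream().filter(t -> !"_default_".equals(t)).iterator();
    }

    public static void main(String[] args) {
        List<String> types = Arrays.asList("_default_", "tweet", "user");
        for (String type : nonDefault(types)) {
            System.out.println(type);  // tweet, user
        }
    }
}
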
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.query;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
@@ -49,7 +49,7 @@ public class WrapperQueryBuilder extends QueryBuilder {
      * Creates a query builder given a query provided as a string
     */
     public WrapperQueryBuilder(String source) {
-        this.source = source.getBytes(Charsets.UTF_8);
+        this.source = source.getBytes(StandardCharsets.UTF_8);
         this.offset = 0;
         this.length = this.source.length;
     }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.shard;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.search.QueryCachingPolicy;
@@ -1202,7 +1202,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             return;
         }
         BytesStreamOutput os = new BytesStreamOutput();
-        PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
 
         if ("checksum".equalsIgnoreCase(checkIndexOnStartup)) {
             // physical verification only: verify all checksums for the latest commit
@@ -1220,7 +1220,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             }
             out.flush();
             if (corrupt != null) {
-                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw corrupt;
             }
         } else {
@@ -1235,7 +1235,7 @@ public class IndexShard extends AbstractIndexShardComponent {
                 // ignore if closed....
                 return;
             }
-            logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+            logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
             if ("fix".equalsIgnoreCase(checkIndexOnStartup)) {
                 if (logger.isDebugEnabled()) {
                     logger.debug("fixing index, writing new segments file ...");
@@ -1253,7 +1253,7 @@ public class IndexShard extends AbstractIndexShardComponent {
         }
 
         if (logger.isDebugEnabled()) {
-            logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+            logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
        }
 
         recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS)));
 
@@ -19,11 +19,8 @@
 
 package org.elasticsearch.indices;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.IOUtils;
@@ -97,6 +94,8 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
@@ -277,12 +276,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
 
     @Override
     public Iterator<IndexService> iterator() {
-        return Iterators.transform(indices.values().iterator(), new Function<IndexServiceInjectorPair, IndexService>() {
-            @Override
-            public IndexService apply(IndexServiceInjectorPair input) {
-                return input.getIndexService();
-            }
-        });
+        return indices.values().stream().map((p) -> p.getIndexService()).iterator();
     }
 
     public boolean hasIndex(String index) {
@@ -404,12 +398,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
             if (delete) {
                 indicesLifecycle.beforeIndexDeleted(indexService);
             }
-            IOUtils.close(Iterables.transform(pluginsService.indexServices(), new Function<Class<? extends Closeable>, Closeable>() {
-                @Override
-                public Closeable apply(Class<? extends Closeable> input) {
-                    return indexInjector.getInstance(input);
-                }
-            }));
+            Stream<Closeable> closeables = pluginsService.indexServices().stream().map(p -> indexInjector.getInstance(p));
+            IOUtils.close(closeables::iterator);
 
             logger.debug("[{}] closing index service (reason [{}])", index, reason);
             indexService.close(reason, delete);
 
@@ -18,17 +18,16 @@
  */
 package org.elasticsearch.indices.analysis;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
 import com.google.common.util.concurrent.UncheckedExecutionException;
 
 import org.apache.lucene.analysis.hunspell.Dictionary;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.KeyedLock;
 import org.elasticsearch.env.Environment;
 
 import java.io.IOException;
@@ -72,32 +71,21 @@ public class HunspellService extends AbstractComponent {
     public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy";
     public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case";
     private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location";
-    private final LoadingCache<String, Dictionary> dictionaries;
+    private final Environment env;
+    private volatile CopyOnWriteHashMap<String, Dictionary> dictionaries = new CopyOnWriteHashMap<>();
     private final Map<String, Dictionary> knownDictionaries;
+    private KeyedLock<String> keyedLock = new KeyedLock<>();
 
     private final boolean defaultIgnoreCase;
     private final Path hunspellDir;
 
-    public HunspellService(final Settings settings, final Environment env) throws IOException {
-        this(settings, env, Collections.<String, Dictionary>emptyMap());
-    }
-
     @Inject
     public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) throws IOException {
         super(settings);
         this.knownDictionaries = knownDictionaries;
         this.hunspellDir = resolveHunspellDirectory(settings, env);
         this.defaultIgnoreCase = settings.getAsBoolean(HUNSPELL_IGNORE_CASE, false);
-        dictionaries = CacheBuilder.newBuilder().build(new CacheLoader<String, Dictionary>() {
-            @Override
-            public Dictionary load(String locale) throws Exception {
-                Dictionary dictionary = knownDictionaries.get(locale);
-                if (dictionary == null) {
-                    dictionary = loadDictionary(locale, settings, env);
-                }
-                return dictionary;
-            }
-        });
+        this.env = env;
         if (!settings.getAsBoolean(HUNSPELL_LAZY_LOAD, false)) {
             scanAndLoadDictionaries();
         }
@@ -109,7 +97,24 @@ public class HunspellService extends AbstractComponent {
      * @param locale The name of the locale
      */
     public Dictionary getDictionary(String locale) {
-        return dictionaries.getUnchecked(locale);
+        Dictionary dictionary = dictionaries.get(locale);
+        if (dictionary == null) {
+            dictionary = knownDictionaries.get(locale);
+            if (dictionary == null) {
+                keyedLock.acquire(locale);
+                dictionary = dictionaries.get(locale);
+                if (dictionary == null) {
+                    try {
+                        dictionary = loadDictionary(locale, settings, env);
+                    } catch (Exception e) {
+                        throw new IllegalStateException("failed to load hunspell dictionary for local: " + locale, e);
+                    }
+                    dictionaries = dictionaries.copyAndPut(locale, dictionary);
+                }
+                keyedLock.release(locale);
+            }
+        }
+        return dictionary;
     }
 
     private Path resolveHunspellDirectory(Settings settings, Environment env) {
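Note: the new getDictionary replaces Guava's LoadingCache with hand-rolled lazy loading: check the volatile map, fall back to the known dictionaries, then lock per key and re-check before loading, publishing the result via copy-and-put so readers never block. A condensed sketch of that double-checked pattern using only JDK types (a single lock object stands in for the per-key KeyedLock, and a plain immutable map for CopyOnWriteHashMap):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class LazyLoadSketch {
    // Volatile reference to an immutable map, mimicking CopyOnWriteHashMap.
    private volatile Map<String, String> cache = Collections.emptyMap();
    private final Object lock = new Object(); // KeyedLock locks per key; one lock keeps the sketch short.

    public String get(String key) {
        String value = cache.get(key);
        if (value == null) {
            synchronized (lock) {
                value = cache.get(key); // re-check: another thread may have loaded it meanwhile
                if (value == null) {
                    value = load(key);
                    Map<String, String> copy = new HashMap<>(cache); // copy-and-put keeps readers lock-free
                    copy.put(key, value);
                    cache = Collections.unmodifiableMap(copy);
                }
            }
        }
        return value;
    }

    private String load(String key) {
        return "dictionary-for-" + key; // stands in for loadDictionary(...)
    }

    public static void main(String[] args) {
        LazyLoadSketch s = new LazyLoadSketch();
        System.out.println(s.get("en_US")); // loads once
        System.out.println(s.get("en_US")); // then served from the map
    }
}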
@@ -131,7 +136,7 @@ public class HunspellService extends AbstractComponent {
                 try (DirectoryStream<Path> inner = Files.newDirectoryStream(hunspellDir.resolve(file), "*.dic")) {
                     if (inner.iterator().hasNext()) { // just making sure it's indeed a dictionary dir
                         try {
-                            dictionaries.getUnchecked(file.getFileName().toString());
+                            getDictionary(file.getFileName().toString());
                         } catch (UncheckedExecutionException e) {
                             // The cache loader throws unchecked exception (see #loadDictionary()),
                             // here we simply report the exception and continue loading the dictionaries
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.node.internal;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
@@ -221,7 +221,7 @@ public class InternalSettingsPreparer {
 
         try {
             List<String> names = new ArrayList<>();
-            try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, Charsets.UTF_8))) {
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) {
                 String name = reader.readLine();
                 while (name != null) {
                     names.add(name);
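Note: Charsets.UTF_8 and StandardCharsets.UTF_8 are both java.nio.charset.Charset constants, so every such replacement in this commit is a mechanical drop-in; StandardCharsets has shipped with the JDK since Java 7. For example:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class CharsetSketch {
    public static void main(String[] args) {
        // Guava: "hello".getBytes(Charsets.UTF_8)
        // JDK:   the identical Charset constant, no extra dependency.
        byte[] bytes = "hello".getBytes(StandardCharsets.UTF_8);
        System.out.println(Arrays.toString(bytes));
        System.out.println(new String(bytes, StandardCharsets.UTF_8)); // round-trips to "hello"
    }
}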
@@ -99,8 +99,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.index.mapper.SourceToParse.source;
 import static org.elasticsearch.percolator.QueryCollector.count;
 import static org.elasticsearch.percolator.QueryCollector.match;
@@ -866,7 +867,9 @@ public class PercolatorService extends AbstractComponent {
         if (aggregations != null) {
             List<SiblingPipelineAggregator> pipelineAggregators = shardResults.get(0).pipelineAggregators();
             if (pipelineAggregators != null) {
-                List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
                     InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
                             bigArrays, scriptService, headersContext));
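Note: StreamSupport.stream(iterable.spliterator(), false) appears wherever the source is an Iterable rather than a Collection, since Iterable has no stream() method of its own; the false argument requests a sequential stream. A small sketch:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class SpliteratorSketch {
    public static void main(String[] args) {
        // Like Aggregations in the hunk above, this is typed as an Iterable,
        // not a Collection, so StreamSupport bridges it into a stream.
        Iterable<Object> iterable = Arrays.<Object>asList("a", 1, 2L);
        List<String> strings = StreamSupport.stream(iterable.spliterator(), false)
                .map(Object::toString)
                .collect(Collectors.toList());
        System.out.println(strings); // [a, 1, 2]
    }
}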
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.rest.support;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.path.PathTrie;
 import org.elasticsearch.common.settings.Settings;
@@ -113,7 +113,7 @@ public class RestUtils {
      * escape sequence.
      */
     public static String decodeComponent(final String s) {
-        return decodeComponent(s, Charsets.UTF_8);
+        return decodeComponent(s, StandardCharsets.UTF_8);
     }
 
     /**
@@ -134,7 +134,7 @@
      *
      * @param s The string to decode (can be empty).
      * @param charset The charset to use to decode the string (should really
-     *                be {@link Charsets#UTF_8}.
+     *                be {@link StandardCharsets#UTF_8}.
      * @return The decoded string, or {@code s} if there's nothing to decode.
      *         If the string to decode is {@code null}, returns an empty string.
      * @throws IllegalArgumentException if the string contains a malformed
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.script;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
@@ -543,7 +543,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
                 // with file scripts are disabled, it makes no sense to even compile it and cache it.
                 if (isAnyScriptContextEnabled(engineService.types()[0], engineService, ScriptType.FILE)) {
                     logger.info("compiling script file [{}]", file.toAbsolutePath());
-                    try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), Charsets.UTF_8)) {
+                    try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
                         String script = Streams.copyToString(reader);
                         String cacheKey = getCacheKey(engineService, scriptNameExt.v1(), null);
                         staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script)));
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.script.groovy;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.hash.Hashing;
 import groovy.lang.Binding;
 import groovy.lang.GroovyClassLoader;
@@ -111,7 +111,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
     @Override
     public Object compile(String script) {
         try {
-            return loader.parseClass(script, Hashing.sha1().hashString(script, Charsets.UTF_8).toString());
+            return loader.parseClass(script, Hashing.sha1().hashString(script, StandardCharsets.UTF_8).toString());
         } catch (Throwable e) {
             if (logger.isTraceEnabled()) {
                 logger.trace("exception compiling Groovy script:", e);
@@ -18,9 +18,7 @@
  */
 package org.elasticsearch.search.aggregations;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -38,21 +36,13 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
-
 /**
  * An internal implementation of {@link Aggregations}.
 */
 public class InternalAggregations implements Aggregations, ToXContent, Streamable {
 
     public final static InternalAggregations EMPTY = new InternalAggregations();
-    private static final Function<InternalAggregation, Aggregation> SUPERTYPE_CAST = new Function<InternalAggregation, Aggregation>() {
-        @Override
-        public Aggregation apply(InternalAggregation input) {
-            return input;
-        }
-    };
-
     private List<InternalAggregation> aggregations = Collections.emptyList();
 
@@ -73,7 +63,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl
      */
     @Override
     public Iterator<Aggregation> iterator() {
-        return Iterators.transform(aggregations.iterator(), SUPERTYPE_CAST);
+        return aggregations.stream().map((p) -> (Aggregation) p).iterator();
     }
 
     /**
@@ -81,7 +71,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl
      */
     @Override
     public List<Aggregation> asList() {
-        return eagerTransform(aggregations, SUPERTYPE_CAST);
+        return aggregations.stream().map((p) -> (Aggregation) p).collect(Collectors.toList());
     }
 
     /**
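Note: the removed SUPERTYPE_CAST function existed only to widen InternalAggregation to Aggregation. Java generics are invariant, so a List<InternalAggregation> cannot simply be returned as a List<Aggregation>; instead each element is widened through map. A sketch with hypothetical stand-in types:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SupertypeCastSketch {
    interface Aggregation {}
    static class InternalAggregation implements Aggregation {}

    public static void main(String[] args) {
        List<InternalAggregation> internal = Arrays.asList(new InternalAggregation(), new InternalAggregation());
        // Invariance: List<InternalAggregation> is not a List<Aggregation>,
        // so the elements are widened one by one through map().
        List<Aggregation> aggs = internal.stream()
                .map((p) -> (Aggregation) p)
                .collect(Collectors.toList());
        System.out.println(aggs.size()); // 2
    }
}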
@@ -19,14 +19,12 @@
 
 package org.elasticsearch.search.aggregations.pipeline;
 
-import com.google.common.base.Function;
 
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
@@ -73,13 +71,6 @@ public abstract class PipelineAggregator implements Streamable {
 
     }
 
-    public static final Function<Aggregation, InternalAggregation> AGGREGATION_TRANFORM_FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private String name;
     private String[] bucketsPaths;
     private Map<String, Object> metaData;
@@ -30,8 +30,8 @@ import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Buck
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
 public abstract class SiblingPipelineAggregator extends PipelineAggregator {
 
@@ -54,8 +54,9 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator {
                 for (int i = 0; i < buckets.size(); i++) {
                     InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i);
                     InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext);
-                    List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                            AGGREGATION_TRANFORM_FUNCTION));
+                    List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                        return (InternalAggregation) p;
+                    }).collect(Collectors.toList());
                     aggs.add(aggToAdd);
                     InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs),
                             bucket);
@@ -66,8 +67,9 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator {
         } else if (aggregation instanceof InternalSingleBucketAggregation) {
             InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation;
             InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext);
-            List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(singleBucketAgg.getAggregations().asList(),
-                    AGGREGATION_TRANFORM_FUNCTION));
+            List<InternalAggregation> aggs = StreamSupport.stream(singleBucketAgg.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(aggToAdd);
             return singleBucketAgg.create(new InternalAggregations(aggs));
         } else {
@@ -19,14 +19,12 @@
 
 package org.elasticsearch.search.aggregations.pipeline.bucketscript;
 
-import com.google.common.base.Function;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
@@ -43,38 +41,26 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter
 import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
 public class BucketScriptPipelineAggregator extends PipelineAggregator {
 
     public final static Type TYPE = new Type("bucket_script");
 
-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public BucketScriptPipelineAggregator readResult(StreamInput in) throws IOException {
-            BucketScriptPipelineAggregator result = new BucketScriptPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        BucketScriptPipelineAggregator result = new BucketScriptPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };
 
     public static void registerStreams() {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }
 
-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private ValueFormatter formatter;
     private GapPolicy gapPolicy;
 
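Note: the rewrite of STREAM works because PipelineAggregatorStreams.Stream evidently has a single abstract method, which lets the eight-line anonymous class collapse into the in -> { ... } lambda. A sketch with a hypothetical single-method interface of the same shape:

import java.io.IOException;

public class LambdaStreamSketch {
    // Hypothetical single-method interface shaped like PipelineAggregatorStreams.Stream.
    interface Reader<T> {
        T readResult(String in) throws IOException;
    }

    public static void main(String[] args) throws IOException {
        // Before: new Reader<String>() { @Override public String readResult(String in) { ... } };
        // After: a lambda, legal because Reader has exactly one abstract method.
        Reader<String> reader = in -> {
            String result = "parsed:" + in;
            return result;
        };
        System.out.println(reader.readResult("payload")); // parsed:payload
    }
}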
@@ -134,9 +120,11 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
                     throw new AggregationExecutionException("series_arithmetic script for reducer [" + name()
                             + "] must return a Number");
                 }
-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), FUNCTION));
+                final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalSimpleValue(name(), ((Number) returned).doubleValue(), formatter,
-                        new ArrayList<PipelineAggregator>(), metaData()));
+                        new ArrayList<>(), metaData()));
                 InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs),
                         (InternalMultiBucketAggregation.InternalBucket) bucket);
                 newBuckets.add(newBucket);
@@ -40,21 +40,19 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
 public class CumulativeSumPipelineAggregator extends PipelineAggregator {
 
     public final static Type TYPE = new Type("cumulative_sum");
 
-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public CumulativeSumPipelineAggregator readResult(StreamInput in) throws IOException {
-            CumulativeSumPipelineAggregator result = new CumulativeSumPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        CumulativeSumPipelineAggregator result = new CumulativeSumPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };
 
     public static void registerStreams() {
@@ -88,8 +86,9 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
         for (InternalHistogram.Bucket bucket : buckets) {
             Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS);
             sum += thisBucketValue;
-            List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                    AGGREGATION_TRANFORM_FUNCTION));
+            List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(new InternalSimpleValue(name(), sum, formatter, new ArrayList<PipelineAggregator>(), metaData()));
             InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(),
                     new InternalAggregations(aggs), bucket.getKeyed(), bucket.getFormatter());
@@ -41,8 +41,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
 public class DerivativePipelineAggregator extends PipelineAggregator {
@@ -100,8 +101,9 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
                 if (xAxisUnits != null) {
                     xDiff = (thisBucketKey - lastBucketKey) / xAxisUnits;
                 }
-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                        AGGREGATION_TRANFORM_FUNCTION));
+                final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                 InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                         aggs), bucket.getKeyed(), bucket.getFormatter());
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.pipeline.having;
 
-import com.google.common.base.Function;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -28,7 +27,6 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.expression.ExpressionScriptEngineService;
-import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
@@ -64,13 +62,6 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }
 
-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private GapPolicy gapPolicy;
 
     private Script script;
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.pipeline.movavg;
 
-import com.google.common.base.Function;
 import com.google.common.collect.EvictingQueue;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -48,8 +47,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
 public class MovAvgPipelineAggregator extends PipelineAggregator {
@@ -69,13 +69,6 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }
 
-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private ValueFormatter formatter;
     private GapPolicy gapPolicy;
     private int window;
@@ -134,7 +127,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
             if (model.hasValue(values.size())) {
                 double movavg = model.next(values);
 
-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                 newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                         aggs), bucket.getKeyed(), bucket.getFormatter());
@@ -175,7 +170,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
             InternalHistogram.Bucket bucket = (InternalHistogram.Bucket) newBuckets.get(lastValidPosition + i + 1);
 
             // Get the existing aggs in the bucket so we don't clobber data
-            aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+            aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<PipelineAggregator>(), metaData()));
 
             InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(
@@ -39,8 +39,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
@@ -48,13 +49,10 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
 
     public final static Type TYPE = new Type("serial_diff");
 
-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public SerialDiffPipelineAggregator readResult(StreamInput in) throws IOException {
-            SerialDiffPipelineAggregator result = new SerialDiffPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        SerialDiffPipelineAggregator result = new SerialDiffPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };
 
     public static void registerStreams() {
@@ -114,7 +112,9 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
             if (!Double.isNaN(thisBucketValue) && !Double.isNaN(lagValue)) {
                 double diff = thisBucketValue - lagValue;
 
-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                 newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                         aggs), bucket.getKeyed(), bucket.getFormatter());
@@ -20,7 +20,7 @@
 package org.elasticsearch.search.builder;
 
 import com.carrotsearch.hppc.ObjectFloatHashMap;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.support.QuerySourceBuilder;
 import org.elasticsearch.action.support.ToXContentToBytes;
@@ -175,7 +175,7 @@ public class SearchSourceBuilder extends ToXContentToBytes {
      * Constructs a new search source builder with a raw search query.
      */
     public SearchSourceBuilder query(String queryString) {
-        return query(queryString.getBytes(Charsets.UTF_8));
+        return query(queryString.getBytes(StandardCharsets.UTF_8));
     }
 
     /**
@@ -214,7 +214,7 @@ public class SearchSourceBuilder extends ToXContentToBytes {
      * (and not aggs for example).
      */
    public SearchSourceBuilder postFilter(String postFilterString) {
-        return postFilter(postFilterString.getBytes(Charsets.UTF_8));
+        return postFilter(postFilterString.getBytes(StandardCharsets.UTF_8));
     }
 
     /**
@@ -63,8 +63,8 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
 /**
  *
@@ -413,7 +413,9 @@ public class SearchPhaseController extends AbstractComponent {
             if (aggregations != null) {
                 List<SiblingPipelineAggregator> pipelineAggregators = firstResult.pipelineAggregators();
                 if (pipelineAggregators != null) {
-                    List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
+                    List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> {
+                        return (InternalAggregation) p;
+                    }).collect(Collectors.toList());
                     for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
                         InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
                                 bigArrays, scriptService, headersContext));
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.transport.netty;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
@@ -724,7 +724,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
         } else if (e.getCause() instanceof SizeHeaderFrameDecoder.HttpOnTransportException) {
             // in case we are able to return data, serialize the exception content and sent it back to the client
             if (ctx.getChannel().isOpen()) {
-                ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(Charsets.UTF_8));
+                ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(StandardCharsets.UTF_8));
                 ChannelFuture channelFuture = ctx.getChannel().write(buffer);
                 channelFuture.addListener(new ChannelFutureListener() {
                     @Override
@@ -20,7 +20,7 @@
 
 package org.elasticsearch.action.bulk;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -34,7 +34,7 @@ public class BulkIntegrationIT extends ESIntegTestCase {
     public void testBulkIndexCreatesMapping() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/bulk-log.json");
         BulkRequestBuilder bulkBuilder = client().prepareBulk();
-        bulkBuilder.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         bulkBuilder.get();
         assertBusy(new Runnable() {
             @Override
@ -19,7 +19,7 @@
|
||||||
|
|
||||||
package org.elasticsearch.action.bulk;
|
package org.elasticsearch.action.bulk;
|
||||||
|
|
||||||
import com.google.common.base.Charsets;
|
import java.nio.charset.StandardCharsets;
|
||||||
|
|
||||||
import org.apache.lucene.util.Constants;
|
import org.apache.lucene.util.Constants;
|
||||||
import org.elasticsearch.action.ActionRequest;
|
import org.elasticsearch.action.ActionRequest;
|
||||||
|
@ -52,7 +52,7 @@ public class BulkRequestTests extends ESTestCase {
|
||||||
bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
|
bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
|
||||||
}
|
}
|
||||||
BulkRequest bulkRequest = new BulkRequest();
|
BulkRequest bulkRequest = new BulkRequest();
|
||||||
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
|
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
|
||||||
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
||||||
assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
|
assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
|
||||||
assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
|
assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
|
||||||
|
@ -63,7 +63,7 @@ public class BulkRequestTests extends ESTestCase {
|
||||||
public void testSimpleBulk2() throws Exception {
|
public void testSimpleBulk2() throws Exception {
|
||||||
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json");
|
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json");
|
||||||
BulkRequest bulkRequest = new BulkRequest();
|
BulkRequest bulkRequest = new BulkRequest();
|
||||||
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
|
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
|
||||||
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -71,7 +71,7 @@ public class BulkRequestTests extends ESTestCase {
|
||||||
public void testSimpleBulk3() throws Exception {
|
public void testSimpleBulk3() throws Exception {
|
||||||
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json");
|
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json");
|
||||||
BulkRequest bulkRequest = new BulkRequest();
|
BulkRequest bulkRequest = new BulkRequest();
|
||||||
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
|
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
|
||||||
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
assertThat(bulkRequest.numberOfActions(), equalTo(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -79,7 +79,7 @@ public class BulkRequestTests extends ESTestCase {
|
||||||
public void testSimpleBulk4() throws Exception {
|
public void testSimpleBulk4() throws Exception {
|
||||||
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json");
|
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json");
|
||||||
BulkRequest bulkRequest = new BulkRequest();
|
BulkRequest bulkRequest = new BulkRequest();
|
||||||
bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
|
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
|
||||||
assertThat(bulkRequest.numberOfActions(), equalTo(4));
|
assertThat(bulkRequest.numberOfActions(), equalTo(4));
|
||||||
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
|
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
|
||||||
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
|
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
|
||||||
|
@ -102,14 +102,14 @@ public class BulkRequestTests extends ESTestCase {
|
||||||
public void testBulkAllowExplicitIndex() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
try {
-new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), null, null, false);
+new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false);
fail();
} catch (Exception e) {

}

bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
-new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), "test", null, false);
+new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false);
}

@Test
@@ -131,7 +131,7 @@ public class BulkRequestTests extends ESTestCase {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json");
BulkRequest bulkRequest = new BulkRequest();
try {
-bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
fail("should have thrown an exception about the wrong format of line 1");
} catch (IllegalArgumentException e) {
assertThat("message contains error about the wrong format of line 1: " + e.getMessage(),
@@ -144,7 +144,7 @@ public class BulkRequestTests extends ESTestCase {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json");
BulkRequest bulkRequest = new BulkRequest();
try {
-bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
fail("should have thrown an exception about the wrong format of line 5");
} catch (IllegalArgumentException e) {
assertThat("message contains error about the wrong format of line 5: " + e.getMessage(),
@@ -157,7 +157,7 @@ public class BulkRequestTests extends ESTestCase {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json");
BulkRequest bulkRequest = new BulkRequest();
try {
-bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
fail("should have thrown an exception about the unknown paramater _foo");
} catch (IllegalArgumentException e) {
assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(),
@@ -170,7 +170,7 @@ public class BulkRequestTests extends ESTestCase {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json");
BulkRequest bulkRequest = new BulkRequest();
try {
-bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
fail("should have thrown an exception about the wrong format of line 3");
} catch (IllegalArgumentException e) {
assertThat("message contains error about the wrong format of line 3: " + e.getMessage(),
@@ -182,7 +182,7 @@ public class BulkRequestTests extends ESTestCase {
public void testSimpleBulk10() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk10.json");
BulkRequest bulkRequest = new BulkRequest();
-bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(9));
}
}
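Note on the recurring substitution above and below: Guava's Charsets.UTF_8 and the JDK's StandardCharsets.UTF_8 (available since Java 7) identify the same charset, so these edits are behavior-preserving. A minimal standalone sketch; the demo class below is illustrative and not part of this commit:

import com.google.common.base.Charsets;

import java.nio.charset.StandardCharsets;

public class CharsetsDemo {
    public static void main(String[] args) {
        // A Charset is identified by its canonical name, so equals() holds
        // between the Guava constant and the JDK constant.
        System.out.println(Charsets.UTF_8.equals(StandardCharsets.UTF_8)); // true
        // Identical encoding behavior either way: 'é' takes two bytes in UTF-8.
        System.out.println("héllo".getBytes(StandardCharsets.UTF_8).length); // 6
    }
}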
@@ -19,7 +19,7 @@

package org.elasticsearch.broadcast;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -72,7 +72,7 @@ public class BroadcastActionsIT extends ESIntegTestCase {
for (int i = 0; i < 5; i++) {
// test failed (simply query that can't be parsed)
try {
-client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet();
+client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(StandardCharsets.UTF_8))).actionGet();
} catch(SearchPhaseExecutionException e) {
assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries));
}
@@ -19,7 +19,7 @@

package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -28,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
-import org.junit.Ignore;
import org.junit.Test;

import java.io.BufferedReader;
@@ -58,7 +57,7 @@ public abstract class CatAllocationTestCase extends ESAllocationTestCase {
public void run() throws IOException {
Set<String> nodes = new HashSet<>();
Map<String, Idx> indices = new HashMap<>();
-try (BufferedReader reader = Files.newBufferedReader(getCatPath(), Charsets.UTF_8)) {
+try (BufferedReader reader = Files.newBufferedReader(getCatPath(), StandardCharsets.UTF_8)) {
String line = null;
// regexp FTW
Pattern pattern = Pattern.compile("^(.+)\\s+(\\d)\\s+([rp])\\s+(STARTED|RELOCATING|INITIALIZING|UNASSIGNED)\\s+\\d+\\s+[0-9.a-z]+\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+).*$");
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.common;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

@@ -35,7 +35,7 @@ public class Base64Tests extends ESTestCase {
@Test // issue #6334
public void testBase64DecodeWithExtraCharactersAfterPadding() throws Exception {
String plain = randomAsciiOfLengthBetween(1, 20) + ":" + randomAsciiOfLengthBetween(1, 20);
-String encoded = Base64.encodeBytes(plain.getBytes(Charsets.UTF_8));
+String encoded = Base64.encodeBytes(plain.getBytes(StandardCharsets.UTF_8));
assertValidBase64(encoded, plain);

// lets append some trash here, if the encoded string has been padded
@@ -46,13 +46,13 @@ public class Base64Tests extends ESTestCase {
}

private void assertValidBase64(String base64, String expected) throws IOException {
-String decoded = new String(Base64.decode(base64.getBytes(Charsets.UTF_8)), Charsets.UTF_8);
+String decoded = new String(Base64.decode(base64.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
assertThat(decoded, is(expected));
}

private void assertInvalidBase64(String base64) {
try {
-Base64.decode(base64.getBytes(Charsets.UTF_8));
+Base64.decode(base64.getBytes(StandardCharsets.UTF_8));
fail(String.format(Locale.ROOT, "Expected IOException to be thrown for string %s (len %d)", base64, base64.length()));
} catch (IOException e) {}
}
@@ -19,7 +19,7 @@

package org.elasticsearch.common;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

@@ -38,7 +38,7 @@ public class PidFileTests extends ESTestCase {
public void testParentIsFile() throws IOException {
Path dir = createTempDir();
Path parent = dir.resolve("foo");
-try(BufferedWriter stream = Files.newBufferedWriter(parent, Charsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
+try(BufferedWriter stream = Files.newBufferedWriter(parent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
stream.write("foo");
}

@@ -65,7 +65,7 @@ public class PidFileTests extends ESTestCase {
Path pidFile = parent.resolve("foo.pid");
long pid = randomLong();
if (randomBoolean() && Files.exists(parent)) {
-try (BufferedWriter stream = Files.newBufferedWriter(pidFile, Charsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
+try (BufferedWriter stream = Files.newBufferedWriter(pidFile, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
stream.write("foo");
}
}
@@ -75,6 +75,6 @@ public class PidFileTests extends ESTestCase {
assertEquals(pid, inst.getPid());
assertFalse(inst.isDeleteOnExit());
assertTrue(Files.exists(pidFile));
-assertEquals(pid, Long.parseLong(new String(Files.readAllBytes(pidFile), Charsets.UTF_8)));
+assertEquals(pid, Long.parseLong(new String(Files.readAllBytes(pidFile), StandardCharsets.UTF_8)));
}
}
@@ -19,7 +19,7 @@

package org.elasticsearch.common.bytes;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
@@ -148,7 +148,7 @@ public class ByteBufferBytesReference implements BytesReference {
if (!buffer.hasRemaining()) {
return "";
}
-final CharsetDecoder decoder = CharsetUtil.getDecoder(Charsets.UTF_8);
+final CharsetDecoder decoder = CharsetUtil.getDecoder(StandardCharsets.UTF_8);
final CharBuffer dst = CharBuffer.allocate(
(int) ((double) buffer.remaining() * decoder.maxCharsPerByte()));
try {
@@ -19,7 +19,7 @@

package org.elasticsearch.common.cli;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import org.elasticsearch.common.settings.Settings;
@@ -153,7 +153,7 @@ public class CheckFileCommandTests extends ESTestCase {

try (FileSystem fs = Jimfs.newFileSystem(configuration)) {
Path path = fs.getPath(randomAsciiOfLength(10));
-Files.write(path, "anything".getBytes(Charsets.UTF_8));
+Files.write(path, "anything".getBytes(StandardCharsets.UTF_8));

Settings settings = Settings.builder()
.put("path.home", createTempDir().toString())
@@ -195,7 +195,7 @@ public class CheckFileCommandTests extends ESTestCase {

private Path writePath(FileSystem fs, String name, String content) throws IOException {
Path path = fs.getPath(name);
-Files.write(path, content.getBytes(Charsets.UTF_8));
+Files.write(path, content.getBytes(StandardCharsets.UTF_8));
return path;
}

@@ -220,11 +220,11 @@ public class CheckFileCommandTests extends ESTestCase {
Path randomPath = paths[randomInt];
switch (mode) {
case CHANGE:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
Files.setPosixFilePermissions(randomPath, Sets.newHashSet(PosixFilePermission.OWNER_EXECUTE, PosixFilePermission.OTHERS_EXECUTE, PosixFilePermission.GROUP_EXECUTE));
break;
case KEEP:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
Set<PosixFilePermission> posixFilePermissions = Files.getPosixFilePermissions(randomPath);
Files.setPosixFilePermissions(randomPath, posixFilePermissions);
break;
@@ -249,12 +249,12 @@ public class CheckFileCommandTests extends ESTestCase {
Path randomPath = paths[randomInt];
switch (mode) {
case CHANGE:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
UserPrincipal randomOwner = fs.getUserPrincipalLookupService().lookupPrincipalByName(randomAsciiOfLength(10));
Files.setOwner(randomPath, randomOwner);
break;
case KEEP:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
UserPrincipal originalOwner = Files.getOwner(randomPath);
Files.setOwner(randomPath, originalOwner);
break;
@@ -279,12 +279,12 @@ public class CheckFileCommandTests extends ESTestCase {
Path randomPath = paths[randomInt];
switch (mode) {
case CHANGE:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
GroupPrincipal randomPrincipal = fs.getUserPrincipalLookupService().lookupPrincipalByGroupName(randomAsciiOfLength(10));
Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(randomPrincipal);
break;
case KEEP:
-Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
GroupPrincipal groupPrincipal = Files.readAttributes(randomPath, PosixFileAttributes.class).group();
Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(groupPrincipal);
break;
@@ -308,7 +308,7 @@ public class CheckFileCommandTests extends ESTestCase {

@Override
public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception {
-Files.write(pathToCreate, "anything".getBytes(Charsets.UTF_8));
+Files.write(pathToCreate, "anything".getBytes(StandardCharsets.UTF_8));
return CliTool.ExitStatus.OK;
}
@@ -19,7 +19,7 @@

package org.elasticsearch.common.io;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

import org.elasticsearch.test.ESTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
@@ -29,7 +29,6 @@ import org.junit.Test;

import java.io.IOException;
import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@@ -120,9 +119,9 @@ public class FileSystemUtilsTests extends ESTestCase {
public void testMoveFilesDoesNotCreateSameFileWithSuffix() throws Exception {
Path[] dirs = new Path[] { createTempDir(), createTempDir(), createTempDir()};
for (Path dir : dirs) {
-Files.write(dir.resolve("file1.txt"), "file1".getBytes(Charsets.UTF_8));
+Files.write(dir.resolve("file1.txt"), "file1".getBytes(StandardCharsets.UTF_8));
Files.createDirectory(dir.resolve("dir"));
-Files.write(dir.resolve("dir").resolve("file2.txt"), "file2".getBytes(Charsets.UTF_8));
+Files.write(dir.resolve("dir").resolve("file2.txt"), "file2".getBytes(StandardCharsets.UTF_8));
}

FileSystemUtils.moveFilesWithoutOverwriting(dirs[0], dst, ".new");
@@ -137,7 +136,7 @@ public class FileSystemUtilsTests extends ESTestCase {
assertFileNotExists(dst.resolve("dir").resolve("file2.txt.new"));

// change file content, make sure it gets updated
-Files.write(dirs[2].resolve("dir").resolve("file2.txt"), "UPDATED".getBytes(Charsets.UTF_8));
+Files.write(dirs[2].resolve("dir").resolve("file2.txt"), "UPDATED".getBytes(StandardCharsets.UTF_8));
FileSystemUtils.moveFilesWithoutOverwriting(dirs[2], dst, ".new");
assertFileContent(dst, "file1.txt", "file1");
assertFileContent(dst, "dir/file2.txt", "file2");
@@ -157,7 +156,7 @@ public class FileSystemUtilsTests extends ESTestCase {
Assert.assertThat("file [" + file + "] should not exist.", Files.exists(file), is(false));
} else {
assertFileExists(file);
-String fileContent = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
+String fileContent = new String(Files.readAllBytes(file), java.nio.charset.StandardCharsets.UTF_8);
// trim the string content to prevent different handling on windows vs. unix and CR chars...
Assert.assertThat(fileContent.trim(), equalTo(expected.trim()));
}
@@ -19,7 +19,7 @@

package org.elasticsearch.common.io;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -39,7 +39,7 @@ public class StreamsTests extends ESTestCase {

@Test
public void testCopyFromInputStream() throws IOException {
-byte[] content = "content".getBytes(Charsets.UTF_8);
+byte[] content = "content".getBytes(StandardCharsets.UTF_8);
ByteArrayInputStream in = new ByteArrayInputStream(content);
ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
long count = copy(in, out);
@@ -50,7 +50,7 @@ public class StreamsTests extends ESTestCase {

@Test
public void testCopyFromByteArray() throws IOException {
-byte[] content = "content".getBytes(Charsets.UTF_8);
+byte[] content = "content".getBytes(StandardCharsets.UTF_8);
ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
copy(content, out);
assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
@@ -19,7 +19,7 @@

package org.elasticsearch.common.lucene.store;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;
@@ -33,7 +33,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
@Test
public void testRandomReads() throws IOException {
for (int i = 0; i < 100; i++) {
-byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
assertEquals(input.length, indexInput.length());
assertEquals(0, indexInput.getFilePointer());
@@ -45,7 +45,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
@Test
public void testRandomOverflow() throws IOException {
for (int i = 0; i < 100; i++) {
-byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
int firstReadLen = randomIntBetween(0, input.length - 1);
randomReadAndSlice(indexInput, firstReadLen);
@@ -64,7 +64,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
@Test
public void testSeekOverflow() throws IOException {
for (int i = 0; i < 100; i++) {
-byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
int firstReadLen = randomIntBetween(0, input.length - 1);
randomReadAndSlice(indexInput, firstReadLen);
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.util;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -76,14 +76,14 @@ public class MultiDataPathUpgraderTests extends ESTestCase {
int numFiles = randomIntBetween(1, 10);
for (int i = 0; i < numFiles; i++, numIdxFiles++) {
String filename = Integer.toString(numIdxFiles);
-try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), Charsets.UTF_8)) {
+try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), StandardCharsets.UTF_8)) {
w.write(filename);
}
}
numFiles = randomIntBetween(1, 10);
for (int i = 0; i < numFiles; i++, numTranslogFiles++) {
String filename = Integer.toString(numTranslogFiles);
-try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), Charsets.UTF_8)) {
+try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), StandardCharsets.UTF_8)) {
w.write(filename);
}
}
@@ -124,14 +124,14 @@ public class MultiDataPathUpgraderTests extends ESTestCase {
final String name = Integer.toString(i);
translogFiles.contains(translog.resolve(name + ".translog"));
byte[] content = Files.readAllBytes(translog.resolve(name + ".translog"));
-assertEquals(name , new String(content, Charsets.UTF_8));
+assertEquals(name , new String(content, StandardCharsets.UTF_8));
}
final HashSet<Path> idxFiles = Sets.newHashSet(FileSystemUtils.files(idx));
for (int i = 0; i < numIdxFiles; i++) {
final String name = Integer.toString(i);
idxFiles.contains(idx.resolve(name + ".tst"));
byte[] content = Files.readAllBytes(idx.resolve(name + ".tst"));
-assertEquals(name , new String(content, Charsets.UTF_8));
+assertEquals(name , new String(content, StandardCharsets.UTF_8));
}
}
}
@@ -19,7 +19,7 @@

package org.elasticsearch.document;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.Alias;
@@ -578,7 +578,7 @@ public class BulkIT extends ESIntegTestCase {
"{\"index\": {\"_id\": \"2\"}}\n" +
"{\"name\": \"Good\", \"last_modified\" : \"2013-04-05\"}\n";

-BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
assertThat(bulkResponse.getItems().length, is(2));
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
@@ -605,7 +605,7 @@ public class BulkIT extends ESIntegTestCase {
"{\"index\": { \"_id\" : \"24000\" } }\n" +
"{\"name\": \"Good\", \"my_routing\" : \"48000\"}\n";

-BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
assertThat(bulkResponse.getItems().length, is(2));
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
@@ -632,7 +632,7 @@ public class BulkIT extends ESIntegTestCase {
"{\"index\": {} }\n" +
"{\"name\": \"Good\", \"my_id\" : \"48\"}\n";

-BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
assertThat(bulkResponse.getItems().length, is(2));
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
@@ -18,14 +18,10 @@
*/
package org.elasticsearch.env;

-import com.google.common.base.Charsets;
-import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

-import java.io.BufferedReader;
import java.io.IOException;
import java.net.URL;

@@ -18,9 +18,7 @@
*/
package org.elasticsearch.http.netty;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.jboss.netty.bootstrap.ClientBootstrap;
@@ -33,11 +31,9 @@ import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.concurrent.CountDownLatch;

-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.lessThan;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.HOST;
import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;

@@ -46,26 +42,20 @@ import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
*/
public class NettyHttpClient implements Closeable {

-private static final Function<? super HttpResponse, String> FUNCTION_RESPONSE_TO_CONTENT = new Function<HttpResponse, String>() {
-@Override
-public String apply(HttpResponse response) {
-return response.getContent().toString(Charsets.UTF_8);
-}
-};
-
-private static final Function<? super HttpResponse, String> FUNCTION_RESPONSE_OPAQUE_ID = new Function<HttpResponse, String>() {
-@Override
-public String apply(HttpResponse response) {
-return response.headers().get("X-Opaque-Id");
-}
-};
-
public static Collection<String> returnHttpResponseBodies(Collection<HttpResponse> responses) {
-return Collections2.transform(responses, FUNCTION_RESPONSE_TO_CONTENT);
+List<String> list = new ArrayList<>(responses.size());
+for (HttpResponse response : responses) {
+list.add(response.getContent().toString(StandardCharsets.UTF_8));
+}
+return list;
}

public static Collection<String> returnOpaqueIds(Collection<HttpResponse> responses) {
-return Collections2.transform(responses, FUNCTION_RESPONSE_OPAQUE_ID);
+List<String> list = new ArrayList<>(responses.size());
+for (HttpResponse response : responses) {
+list.add(response.headers().get("X-Opaque-Id"));
+}
+return list;
}

private final ClientBootstrap clientBootstrap;
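The NettyHttpClient change above is one of the few non-mechanical edits in this commit: Collections2.transform returned a lazy view that re-applied the Function on every iteration, whereas the replacement builds an eager List once. A sketch of the two shapes, assuming Guava is still on the classpath; the demo class is illustrative only:

import com.google.common.base.Function;
import com.google.common.collect.Collections2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class TransformDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("foo", "quux");

        // Guava style (now forbidden): a lazy view, re-evaluated on each access.
        Collection<Integer> lazyLengths = Collections2.transform(words, new Function<String, Integer>() {
            @Override
            public Integer apply(String input) {
                return input.length();
            }
        });

        // Plain-JDK replacement used above: an eager copy, evaluated once.
        List<Integer> eagerLengths = new ArrayList<>(words.size());
        for (String word : words) {
            eagerLengths.add(word.length());
        }

        System.out.println(lazyLengths);  // [3, 4]
        System.out.println(eagerLengths); // [3, 4]
    }
}

For this test helper the difference is harmless, since each result collection is consumed once.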
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.http.netty;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
@@ -202,7 +202,7 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {
request = (HttpRequest) e.getMessage();
}

-ChannelBuffer buffer = ChannelBuffers.copiedBuffer(request.getUri(), Charsets.UTF_8);
+ChannelBuffer buffer = ChannelBuffers.copiedBuffer(request.getUri(), StandardCharsets.UTF_8);

DefaultHttpResponse httpResponse = new DefaultHttpResponse(HTTP_1_1, OK);
httpResponse.headers().add(CONTENT_LENGTH, buffer.readableBytes());
@@ -19,7 +19,7 @@

package org.elasticsearch.index.mapper.simple;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
@@ -128,7 +128,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
.add(object("name").add(stringField("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService(), mapperParser);

-BytesReference json = new BytesArray("".getBytes(Charsets.UTF_8));
+BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
try {
docMapper.parse("test", "person", "1", json).rootDoc();
fail("this point is never reached");
@@ -19,7 +19,7 @@

package org.elasticsearch.index.shard;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESTestCase;
@@ -50,7 +50,7 @@ public class CommitPointsTests extends ESTestCase {
CommitPoint commitPoint = new CommitPoint(1, "test", CommitPoint.Type.GENERATED, indexFiles, translogFiles);

byte[] serialized = CommitPoints.toXContent(commitPoint);
-logger.info("serialized commit_point {}", new String(serialized, Charsets.UTF_8));
+logger.info("serialized commit_point {}", new String(serialized, StandardCharsets.UTF_8));

CommitPoint desCp = CommitPoints.fromXContent(serialized);
assertThat(desCp.version(), equalTo(commitPoint.version()));
@@ -19,7 +19,7 @@
package org.elasticsearch.index.store;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.IndexFileNames;
@@ -217,12 +217,12 @@ public class CorruptedFileIT extends ESIntegTestCase {
}
try (CheckIndex checkIndex = new CheckIndex(store.directory())) {
BytesStreamOutput os = new BytesStreamOutput();
-PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
checkIndex.setInfoStream(out);
out.flush();
CheckIndex.Status status = checkIndex.checkIndex();
if (!status.clean) {
-logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
throw new IOException("index check failure");
}
}
@@ -19,8 +19,6 @@

package org.elasticsearch.indexlifecycle;

-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
@@ -30,7 +28,6 @@ import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -38,6 +35,8 @@ import org.elasticsearch.test.InternalTestCluster;
import org.junit.Test;

import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

import static org.elasticsearch.client.Requests.clusterHealthRequest;
import static org.elasticsearch.client.Requests.createIndexRequest;
@@ -222,12 +221,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase {
}

private void assertNodesPresent(RoutingNodes routingNodes, String... nodes) {
-final Set<String> keySet = Sets.newHashSet(Iterables.transform(routingNodes, new Function<RoutingNode, String>() {
-@Override
-public String apply(RoutingNode input) {
-return input.nodeId();
-}
-}));
+final Set<String> keySet = StreamSupport.stream(routingNodes.spliterator(), false).map((p) -> (p.nodeId())).collect(Collectors.toSet());
assertThat(keySet, containsInAnyOrder(nodes));
}
}
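The assertNodesPresent rewrite above uses the standard JDK idiom for streaming over a bare Iterable: RoutingNodes is iterable but not a Collection, so it has no stream() method, and StreamSupport.stream over its spliterator fills the gap. A minimal sketch with a stand-in Iterable (illustrative only):

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class SpliteratorDemo {
    public static void main(String[] args) {
        // Stand-in for an Iterable that is not a Collection.
        Iterable<String> nodeIds = Arrays.asList("node_1", "node_2", "node_1");

        // 'false' requests a sequential (non-parallel) stream.
        Set<String> unique = StreamSupport.stream(nodeIds.spliterator(), false)
                .collect(Collectors.toSet());

        System.out.println(unique); // [node_1, node_2] (set order unspecified)
    }
}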
@@ -19,7 +19,6 @@

package org.elasticsearch.plugins;

-import com.google.common.base.Function;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.test.ESTestCase;
@@ -30,8 +29,8 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
+import java.util.stream.Collectors;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
import static org.hamcrest.Matchers.contains;

public class PluginInfoTests extends ESTestCase {
@@ -260,10 +259,10 @@ public class PluginInfoTests extends ESTestCase {
public void testReadFromPropertiesSitePluginWithoutSite() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir,
"description", "fake desc",
"name", "my_plugin",
"version", "1.0",
"site", "true");
try {
PluginInfo.readFromProperties(pluginDir);
fail("didn't get expected exception");
@@ -281,12 +280,7 @@ public class PluginInfoTests extends ESTestCase {
pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));

final List<PluginInfo> infos = pluginsInfo.getInfos();
-List<String> names = eagerTransform(infos, new Function<PluginInfo, String>() {
-@Override
-public String apply(PluginInfo input) {
-return input.getName();
-}
-});
+List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
assertThat(names, contains("a", "b", "c", "d", "e"));
}
}
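The eagerTransform removal above follows the same pattern: a Function indirection becomes a plain stream().map().collect() pipeline. A self-contained sketch with a stand-in for PluginInfo (illustrative only):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class MapCollectDemo {
    // Stand-in for PluginInfo; only the getter matters for the pattern.
    static class Info {
        private final String name;
        Info(String name) { this.name = name; }
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Info> infos = Arrays.asList(new Info("a"), new Info("b"), new Info("c"));

        // Same shape as the replacement in the diff above.
        List<String> names = infos.stream()
                .map((input) -> input.getName())
                .collect(Collectors.toList());

        System.out.println(names); // [a, b, c]
    }
}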
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.plugins;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import com.google.common.hash.Hashing;

import org.apache.http.impl.client.HttpClients;
@@ -28,7 +28,6 @@ import org.elasticsearch.common.Base64;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliTool.ExitStatus;
import org.elasticsearch.common.cli.CliToolTestCase.CaptureOutputTerminal;
-import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
@@ -57,7 +56,6 @@ import java.io.BufferedWriter;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
-import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -112,7 +110,7 @@ public class PluginManagerIT extends ESIntegTestCase {

private void writeSha1(Path file, boolean corrupt) throws IOException {
String sha1Hex = Hashing.sha1().hashBytes(Files.readAllBytes(file)).toString();
-try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), Charsets.UTF_8)) {
+try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), StandardCharsets.UTF_8)) {
out.write(sha1Hex);
if (corrupt) {
out.write("bad");
@@ -122,7 +120,7 @@ public class PluginManagerIT extends ESIntegTestCase {

private void writeMd5(Path file, boolean corrupt) throws IOException {
String md5Hex = Hashing.md5().hashBytes(Files.readAllBytes(file)).toString();
-try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), Charsets.UTF_8)) {
+try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), StandardCharsets.UTF_8)) {
out.write(md5Hex);
if (corrupt) {
out.write("bad");
@@ -618,7 +616,7 @@ public class PluginManagerIT extends ESIntegTestCase {
assertThat(requests, hasSize(1));
String msg = String.format(Locale.ROOT, "Request header did not contain Authorization header, terminal output was: %s", terminal.getTerminalOutput());
assertThat(msg, requests.get(0).headers().contains("Authorization"), is(true));
-assertThat(msg, requests.get(0).headers().get("Authorization"), is("Basic " + Base64.encodeBytes("user:pass".getBytes(Charsets.UTF_8))));
+assertThat(msg, requests.get(0).headers().get("Authorization"), is("Basic " + Base64.encodeBytes("user:pass".getBytes(StandardCharsets.UTF_8))));
} finally {
HttpsURLConnection.setDefaultSSLSocketFactory(defaultSocketFactory);
serverBootstrap.releaseExternalResources();
@@ -19,7 +19,6 @@

package org.elasticsearch.search.basic;

-import com.google.common.base.Charsets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -20,7 +20,6 @@
package org.elasticsearch.search.basic;


-import com.google.common.base.Charsets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.search.suggest;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.io.Resources;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
@@ -506,7 +506,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
         assertAcked(builder.addMapping("type1", mapping));
         ensureGreen();
 
-        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
+        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) {
             index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
         }
         refresh();
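com.google.common.io.Resources is still imported in this file; only the charset argument changes in these loops. If Resources.readLines is forbidden in a later step of the Guava removal, the same loop could be written with plain JDK I/O; a sketch under that assumption:

    // JDK-only version of the Resources.readLines(...) loop above
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            SuggestSearchIT.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
        }
    }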
@@ -698,7 +698,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
         assertAcked(builder.addMapping("type1", mapping));
         ensureGreen();
 
-        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
+        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) {
             index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
         }
         refresh();
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.suggest.phrase;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.Tokenizer;
@@ -86,7 +86,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 
         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();
@@ -228,7 +228,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 
         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();
@@ -315,7 +315,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 
         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();
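An aside on the three NoisyChannelSpellCheckerTests hunks above: the BufferedReader is never closed, which is tolerable in a test but would be flagged in production code. A try-with-resources form of the same read loop, behavior unchanged:

    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            Document doc = new Document();
            // ... populate and add the document as before ...
        }
    }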
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.test;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 
@@ -35,7 +35,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]");
         }
-        return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8));
+        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }
 
     public static String copyToStringFromClasspath(String path) throws IOException {
@@ -43,7 +43,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
         }
-        return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8));
+        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }
 
     public static byte[] copyToBytesFromClasspath(String path) throws IOException {
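Streams.copyToString in the two hunks above is org.elasticsearch.common.io.Streams, not Guava, so only the InputStreamReader charset changes. For reference, the copy it performs is roughly this JDK-only sketch:

    // Roughly what Streams.copyToString(Reader) does: drain a Reader into a String
    static String copyToString(Reader in) throws IOException {
        StringBuilder out = new StringBuilder();
        char[] buffer = new char[1024];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.append(buffer, 0, read);
        }
        return out.toString();
    }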
@@ -21,7 +21,7 @@ package org.elasticsearch.test.store;
 
 import com.carrotsearch.randomizedtesting.SeedUtils;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.*;
@@ -149,7 +149,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
         }
         try (CheckIndex checkIndex = new CheckIndex(dir)) {
             BytesStreamOutput os = new BytesStreamOutput();
-            PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+            PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
             checkIndex.setInfoStream(out);
             out.flush();
             CheckIndex.Status status = checkIndex.checkIndex();
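StandardCharsets.UTF_8.name(), the charset name rather than the Charset object, is deliberate here: on Java 8 the only charset-aware PrintStream constructor takes a String encoding and throws the checked UnsupportedEncodingException. Passing the constant's name keeps the encoding explicit while fitting that signature:

    // Java 8 signature: PrintStream(OutputStream out, boolean autoFlush, String encoding)
    //                   throws UnsupportedEncodingException
    PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());

The same charset then decodes os.bytes() in the logging of the next hunk, so the write/read round-trip is lossless.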
@@ -157,11 +157,11 @@ public class MockFSDirectoryService extends FsDirectoryService {
                 ESTestCase.checkIndexFailed = true;
                 logger.warn("check index [failure] index files={}\n{}",
                         Arrays.toString(dir.listAll()),
-                        new String(os.bytes().toBytes(), Charsets.UTF_8));
+                        new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw new IOException("index check failure");
             } else {
                 if (logger.isDebugEnabled()) {
-                    logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                    logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 }
             }
         }
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.transport;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -88,10 +88,10 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
         String data = randomMethod + " / HTTP/1.1";
 
         try (Socket socket = new Socket(host, port)) {
-            socket.getOutputStream().write(data.getBytes(Charsets.UTF_8));
+            socket.getOutputStream().write(data.getBytes(StandardCharsets.UTF_8));
             socket.getOutputStream().flush();
 
-            try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), Charsets.UTF_8))) {
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) {
                 assertThat(reader.readLine(), is("This is not a HTTP port"));
             }
         }
@@ -100,7 +100,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
     @Test
     public void testThatNothingIsReturnedForOtherInvalidPackets() throws Exception {
         try (Socket socket = new Socket(host, port)) {
-            socket.getOutputStream().write("FOOBAR".getBytes(Charsets.UTF_8));
+            socket.getOutputStream().write("FOOBAR".getBytes(StandardCharsets.UTF_8));
             socket.getOutputStream().flush();
 
             // end of stream
@@ -19,7 +19,7 @@
 package org.elasticsearch.transport.netty;
 
 import com.carrotsearch.hppc.IntHashSet;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.Version;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.component.Lifecycle;
@@ -238,7 +238,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
 
         assertThat(socket.isConnected(), is(true));
         try (OutputStream os = socket.getOutputStream()) {
-            os.write("foo".getBytes(Charsets.UTF_8));
+            os.write("foo".getBytes(StandardCharsets.UTF_8));
             os.flush();
         }
     }
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.validate;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
|
@ -68,7 +68,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
|
||||||
|
|
||||||
refresh();
|
refresh();
|
||||||
|
|
||||||
assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(Charsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
|
assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(StandardCharsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
|
||||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_id:1")).execute().actionGet().isValid(), equalTo(true));
|
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_id:1")).execute().actionGet().isValid(), equalTo(true));
|
||||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));
|
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));
|
||||||
|
|
||||||
|
@ -97,7 +97,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
|
||||||
|
|
||||||
for (Client client : internalCluster()) {
|
for (Client client : internalCluster()) {
|
||||||
ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
|
ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
|
||||||
.setSource("foo".getBytes(Charsets.UTF_8))
|
.setSource("foo".getBytes(StandardCharsets.UTF_8))
|
||||||
.setExplain(true)
|
.setExplain(true)
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
assertThat(response.isValid(), equalTo(false));
|
assertThat(response.isValid(), equalTo(false));
|
||||||
|
|
|
@@ -107,7 +107,10 @@ com.google.common.util.concurrent.SettableFuture
 com.google.common.util.concurrent.Futures
 com.google.common.util.concurrent.MoreExecutors
 com.google.common.collect.ImmutableSortedMap
+com.google.common.base.Charsets
+com.google.common.base.Function
+com.google.common.collect.Collections2
 
 @defaultMessage Do not violate java's access system
 java.lang.reflect.AccessibleObject#setAccessible(boolean)
 java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)
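The three lines added above are forbidden-apis signature entries: every fully qualified class listed in this file makes any reference to it a build failure, which is what turns "removes" into "forbids" in the commit message. The most common before/after in this commit, as a hypothetical example:

    // Fails the forbidden-apis check after this commit:
    byte[] bad = "source".getBytes(com.google.common.base.Charsets.UTF_8);

    // Compliant JDK replacement:
    byte[] ok = "source".getBytes(java.nio.charset.StandardCharsets.UTF_8);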
@@ -27,23 +27,15 @@ import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.compute.Compute;
 import com.google.api.services.compute.model.Instance;
 import com.google.api.services.compute.model.InstanceList;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.CollectionUtils;
 
 import java.io.IOException;
 import java.security.GeneralSecurityException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
+import java.util.*;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
-
 /**
  *
@@ -61,37 +53,30 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute
 
     @Override
     public Collection<Instance> instances() {
-
         logger.debug("get instances for project [{}], zones [{}]", project, zones);
-
-        List<List<Instance>> instanceListByZone = eagerTransform(zones, new Function<String, List<Instance>>() {
-            @Override
-            public List<Instance> apply(String zoneId) {
-                try {
-                    Compute.Instances.List list = client().instances().list(project, zoneId);
-                    InstanceList instanceList = list.execute();
-                    if (instanceList.isEmpty()) {
-                        return Collections.EMPTY_LIST;
-                    }
-
-                    return instanceList.getItems();
-                } catch (IOException e) {
-                    logger.warn("Problem fetching instance list for zone {}", zoneId);
-                    logger.debug("Full exception:", e);
-
-                    return Collections.EMPTY_LIST;
-                }
+        final List<Instance> instances = zones.stream().map((zoneId) -> {
+            try {
+                Compute.Instances.List list = client().instances().list(project, zoneId);
+                InstanceList instanceList = list.execute();
+                if (instanceList.isEmpty()) {
+                    return Collections.EMPTY_LIST;
+                }
+                return instanceList.getItems();
+            } catch (IOException e) {
+                logger.warn("Problem fetching instance list for zone {}", zoneId);
+                logger.debug("Full exception:", e);
+                return Collections.EMPTY_LIST;
             }
-        });
-
-        // Collapse instances from all zones into one neat list
-        List<Instance> instanceList = CollectionUtils.iterableAsArrayList(Iterables.concat(instanceListByZone));
+        }).reduce(new ArrayList<>(), (a, b) -> {
+            a.addAll(b);
+            return a;
+        });
 
-        if (instanceList.size() == 0) {
+        if (instances.isEmpty()) {
             logger.warn("disabling GCE discovery. Can not get list of nodes");
         }
 
-        return instanceList;
+        return instances;
     }
 
     private Compute client;
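A design note on the rewritten instances(): the anonymous Guava Function becomes a lambda over zones.stream(), and the Iterables.concat plus iterableAsArrayList flattening becomes reduce(new ArrayList<>(), ...). Mutating the identity value of reduce works for this sequential stream, but the identity is supposed to be reusable, so it would break if the stream ever became parallel. A flatMap/collect sketch that avoids the mutable identity, same output, assuming the same client() and logging and java.util.stream imports:

    final List<Instance> instances = zones.stream().flatMap(zoneId -> {
        try {
            InstanceList instanceList = client().instances().list(project, zoneId).execute();
            return instanceList.isEmpty() ? Stream.<Instance>empty() : instanceList.getItems().stream();
        } catch (IOException e) {
            logger.warn("Problem fetching instance list for zone {}", zoneId);
            logger.debug("Full exception:", e);
            return Stream.<Instance>empty();
        }
    }).collect(Collectors.toList());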